
from IPython.display import HTML
HTML("""<style>h1,h2,h3 {margin: 1em 0 0.5em 0;font-weight: 600;font-family:'Titillium Web', sans-serif;position: relative; font-size: 36px;line-height: 40px;padding: 15px 15px 15px 2.5%;color: #00018D;box-shadow: inset 0 0 0 1px rgba(97,0,45, 1), inset 0 0 5px rgba(53,86,129, 1),inset -285px 0 35px #F2D8FF;border-radius: 0 10px 0 15px;background: #FFD8B2}</style>""")
import os
import cv2
import sys
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from tensorflow import set_random_seed
from keras.preprocessing.image import ImageDataGenerator
from keras.preprocessing.image import load_img
from keras.preprocessing.image import img_to_array
from keras.applications.resnet50 import ResNet50
from keras.applications import DenseNet121
from keras.applications import InceptionResNetV2
from keras.applications import Xception
from keras.models import Model
from keras.layers import Input
from keras.layers.core import Dropout
from keras.layers.core import Flatten
from keras.layers.core import Dense
from keras.activations import softmax
from keras.activations import elu
from mlxtend.plotting import plot_confusion_matrix
from sklearn.metrics import classification_report, confusion_matrix
from prettytable import PrettyTable
numpy == 1.18.5
pandas == 1.1.3
seaborn ==0.11.0
SEED = 7
np.random.seed(SEED)
set_random_seed(SEED)
dir_path = ""
IMG_DIM = 299 # 224 399 #
BATCH_SIZE = 12
CHANNEL_SIZE = 3
NUM_EPOCHS = 60
TRAIN_DIR = 'train_images'
TEST_DIR = 'test_images'
FREEZE_LAYERS = 2
CLASSS = {0: "No DR", 1: "Mild", 2: "Moderate", 3: "Severe", 4: "Proliferative DR"}
df_train = pd.read_csv(os.path.join(dir_path, "train.csv"))
df_test = pd.read_csv(os.path.join(dir_path, "test.csv"))
NUM_CLASSES = df_train['diagnosis'].nunique()
print("Training set has {} samples and {} classes.".format(df_train.shape[0], df_train.shape[1]))
print("Testing set has {} samples and {} classes.".format(df_test.shape[0], df_test.shape[1]))
Training set has 3662 samples and 2 classes. Testing set has 1928 samples and 1 classes.
as per below bar chart, it clearly showing that data set is quite imbalance. And even it's expected in medical domain.
# Plot pie chart
labels = 'Train', 'Test'
sizes = len(df_train), len(df_test)
fig1, ax1 = plt.subplots(figsize=(5,5))
ax1.pie(sizes, labels=labels, autopct='%1.1f%%', shadow=True, startangle=90)
ax1.axis('equal')
plt.title('Train and Test sets')
plt.show()
Both training and testing datasets are not too large.
Training dataset is about 3 times greater than the testing dataset.
Stages Of Diabetic Retinopathy
# Plot pie chart
labels = 'No DR', 'Moderate', 'Mild', 'Proliferative DR', 'Severe'
sizes = train.diagnosis.value_counts()
fig1, ax1 = plt.subplots(figsize=(10,7))
ax1.pie(sizes, labels=labels, autopct='%1.1f%%', shadow=True, startangle=90)
ax1.axis('equal')
plt.title('Diabetic retinopathy condition labels')
plt.show()
x_train, X_test, y_train, y_test = train_test_split(df_train.id_code, df_train.diagnosis, test_size=0.2,
random_state=SEED, stratify=df_train.diagnosis)
def draw_img(imgs, target_dir, class_label='0'):
fig, axis = plt.subplots(2, 6, figsize=(15, 6))
for idnx, (idx, row) in enumerate(imgs.iterrows()):
imgPath = os.path.join(dir_path, f"{target_dir}/{row['id_code']}.png")
img = cv2.imread(imgPath)
row = idnx // 6
col = idnx % 6
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
axis[row, col].imshow(img)
plt.suptitle(class_label)
plt.show()
CLASS_ID = 0
draw_img(df_train[df_train.diagnosis == CLASS_ID].head(12), 'train_images', CLASSS[CLASS_ID])
CLASS_ID = 1
draw_img(df_train[df_train.diagnosis == CLASS_ID].head(12), 'train_images', CLASSS[CLASS_ID])
CLASS_ID = 2
draw_img(df_train[df_train.diagnosis == CLASS_ID].head(12), 'train_images', CLASSS[CLASS_ID])
CLASS_ID = 3
draw_img(df_train[df_train.diagnosis == CLASS_ID].head(12), 'train_images', CLASSS[CLASS_ID])
CLASS_ID = 4
draw_img(df_train[df_train.diagnosis == CLASS_ID].head(12), 'train_images', CLASSS[CLASS_ID])
CLASS_ID = 'Test DataSet'
draw_img(df_test.sample(12, random_state=SEED), 'test_images', CLASS_ID)
def check_max_min_img_height_width(df, img_dir):
max_Height , max_Width =0 ,0
min_Height , min_Width =sys.maxsize ,sys.maxsize
for idx, row in df.iterrows():
imgPath=os.path.join(dir_path,f"{img_dir}/{row['id_code']}.png")
img=cv2.imread(imgPath)
H,W=img.shape[:2]
max_Height=max(H,max_Height)
max_Width =max(W,max_Width)
min_Height=min(H,min_Height)
min_Width =min(W,min_Width)
return max_Height, max_Width, min_Height, min_Width
check_max_min_img_height_width(df_train, TRAIN_DIR)
(2848, 4288, 358, 474)
check_max_min_img_height_width(df_test, TEST_DIR)
(1958, 2896, 480, 640)
Converting the Ratina Images into Grayscale. So, we can usnderstand the regin or intest .
figure = plt.figure(figsize=(20, 16))
for target_class in (y_train.unique()):
for i, (idx, row) in enumerate(
df_train.loc[df_train.diagnosis == target_class].sample(5, random_state=SEED).iterrows()):
ax = figure.add_subplot(5, 5, target_class * 5 + i + 1)
imagefile = f"train_images/{row['id_code']}.png"
img = cv2.imread(imagefile)
img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
img = cv2.resize(img, (IMG_DIM, IMG_DIM))
plt.imshow(img, cmap='gray')
ax.set_title(CLASSS[target_class])
It's clearly showing, that the image [0,1] has give regin black around the EYE ball. Which is ust noise, that will not add any value fo model. We need to remove this black area. in my next iteration will work on that to crop black are from image.
Some images has big blank space. they will take only computation power and add noise to model. So better will will crop the blank spaces from images.
def draw_img_light(imgs, target_dir, class_label='0'):
fig, axis = plt.subplots(2, 6, figsize=(15, 6))
for idnx, (idx, row) in enumerate(imgs.iterrows()):
imgPath = os.path.join(dir_path, f"{target_dir}/{row['id_code']}.png")
img = cv2.imread(imgPath)
row = idnx // 6
col = idnx % 6
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
img = cv2.resize(img, (IMG_DIM, IMG_DIM))
img=cv2.addWeighted ( img,4, cv2.GaussianBlur( img , (0,0) , IMG_DIM/10) ,-4 ,128) # the trick is to add this line
img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
axis[row, col].imshow(img, cmap='gray')
plt.suptitle(class_label)
plt.show()
CLASS_ID = 3
draw_img_light(df_train[df_train.diagnosis == CLASS_ID].head(12), 'train_images', CLASSS[CLASS_ID])
def crop_image1(img,tol=7):
mask = img>tol
return img[np.ix_(mask.any(1),mask.any(0))]
def crop_image_from_gray(img,tol=7):
if img.ndim== 2:
mask=img>tol
elif img.ndim==3:
gray_img=cv2.cvtColor(img,cv2.COLOR_RGB2GRAY)
mask=gray_img>tol
check_shape = img[:,:,0][np.ix_(mask.any(1),mask.any(0))].shape[0]
if check_shape ==0:
return img
else:
img1=img[:,:,0][np.ix_(mask.any(1),mask.any(0))]
img2=img[:,:,1][np.ix_(mask.any(1),mask.any(0))]
img3=img[:,:,2][np.ix_(mask.any(1),mask.any(0))]
print(img1.shape,img2.shape,img3.shape)
img=np.stack([img1,img2,img3],axis=1)
print(img.shape)
return img
def load_ben_color(path, sigmaX=10):
image = cv2.imread(path)
image = crop_image_from_gray(image)
image = cv2.resize(image, (IMG_DIM, IMG_DIM))
image=cv2.addWeighted ( image,4, cv2.GaussianBlur( image , (0,0) , sigmaX) ,-4 ,128)
return image
def crop_image(img,tol=7):
w, h = img.shape[1],img.shape[0]
gray_img = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
gray_img = cv2.blur(gray_img,(5,5))
shape = gray_img.shape
gray_img = gray_img.reshape(-1,1)
quant = quantile_transform(gray_img, n_quantiles=256, random_state=0, copy=True)
quant = (quant*256).astype(int)
gray_img = quant.reshape(shape)
xp = (gray_img.mean(axis=0)>tol)
yp = (gray_img.mean(axis=1)>tol)
x1, x2 = np.argmax(xp), w-np.argmax(np.flip(xp))
y1, y2 = np.argmax(yp), h-np.argmax(np.flip(yp))
if x1 >= x2 or y1 >= y2 : # something wrong with the crop
return img # return original image
else:
img1=img[y1:y2,x1:x2,0]
img2=img[y1:y2,x1:x2,1]
img3=img[y1:y2,x1:x2,2]
img = np.stack([img1,img2,img3],axis=-1)
return img
def process_image(image, size=512):
image = cv2.resize(image, (size,int(size*image.shape[0]/image.shape[1])))
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
try:
image = crop_image(image, tol=15)
except Exception as e:
image = image
print( str(e) )
return image
figure = plt.figure(figsize=(20, 16))
for target_class in (y_train.unique()):
# print(CLASSS[target_class],target_class)
for i, (idx, row) in enumerate(
df_train.loc[df_train.diagnosis == target_class].sample(5, random_state=SEED).iterrows()):
ax = figure.add_subplot(5, 5, target_class * 5 + i + 1)
imagefile = f"train_images/{row['id_code']}.png"
img = cv2.imread(imagefile)
img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
img = cv2.resize(img, (IMG_DIM, IMG_DIM))
img = cv2.addWeighted(img, 4, cv2.GaussianBlur(img, (0, 0), IMG_DIM / 10), -4, 128)
plt.imshow(img, cmap='gray')
ax.set_title('%s-%d-%s' % (CLASSS[target_class], idx, row['id_code']))
imgPath = f"train_images/cd54d022e37d.png"
img = cv2.imread(imgPath)
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
_, thresh = cv2.threshold(gray, 1, 255, cv2.THRESH_BINARY)
contours, hierarchy = cv2.findContours(thresh, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
cnt = contours[0]
x, y, w, h = cv2.boundingRect(cnt)
img = img[y:y + h, x:x + w]
plt.imshow(img)
<matplotlib.image.AxesImage at 0x7f5916c185f8>
def random_crop(img, random_crop_size):
assert img.shape[2] == 3
height, width = img.shape[0], img.shape[1]
dy, dx = random_crop_size
x = np.random.randint(0, width - dx + 1)
y = np.random.randint(0, height - dy + 1)
img = img[y:(y + dy), x:(x + dx), :]
return img
def crop_generator(batches, crop_length):
while True:
batch_x, batch_y = next(batches)
batch_crops = np.zeros((batch_x.shape[0], crop_length, 3))
for i in range(batch_x.shape[0]):
batch_crops[0] = random_crop(batch_x[i], (crop_length, crop_length))
yield (batch_crops, batch_y)
df_train.id_code = df_train.id_code.apply(lambda x: x + ".png")
df_test.id_code = df_test.id_code.apply(lambda x: x + ".png")
df_train['diagnosis'] = df_train['diagnosis'].astype('str')
In this section willl use Keras ImageDataGenerator class for generating data for Keras model. It is used for data generation, increasing the data size. with the help of ImageDataGenerator we will do image "augment" via a number of random transformations, so that our model would never see twice the exact same picture.
Training Deep Learning model can perform better with more data, and augementation technique can create variations of data that can increase the ababiliy of fit model to gene
datagenerator=ImageDataGenerator(horizontal_flip=True, vertical_flip=True, rotation_range=40, zoom_range=0.2, shear_range=0.1,fill_mode='nearest')
imgPath = f"train_images/cd54d022e37d.png"
# Loading image
img = load_img(imgPath)
data = img_to_array(img)
samples =np.expand_dims(data, 0)
i=5
it=datagenerator.flow(samples , batch_size=1)
for i in range(5):
plt.subplot(230 + 1 + i)
batch = it.next()
image = batch[0].astype('uint8')
plt.imshow(image)
plt.show()
train_datagen = ImageDataGenerator(rescale=1. / 255,
validation_split=0.15,
horizontal_flip=True,
vertical_flip=True,
rotation_range=40,
zoom_range=0.2,
shear_range=0.1,
fill_mode='nearest')
train_generator = train_datagen.flow_from_dataframe(dataframe=df_train,
directory="train_images/",
x_col="id_code",
y_col="diagnosis",
batch_size=BATCH_SIZE,
class_mode="categorical",
target_size=(IMG_DIM, IMG_DIM),
subset='training',
shaffle=True,
seed=SEED,
)
valid_generator = train_datagen.flow_from_dataframe(dataframe=df_train,
directory="train_images/",
x_col="id_code",
y_col="diagnosis",
batch_size=BATCH_SIZE,
class_mode="categorical",
target_size=(IMG_DIM, IMG_DIM),
subset='validation',
shaffle=True,
seed=SEED
)
del x_train
del y_train
Found 3113 validated image filenames belonging to 5 classes. Found 549 validated image filenames belonging to 5 classes.
8191
resnet = ResNet50(weights="imagenet", include_top=False,
input_tensor=Input(shape=(224, 224, 3)))
outputs = resnet.output
outputs = Flatten(name="flatten")(outputs)
outputs = Dropout(0.5)(outputs)
outputs = Dense(5, activation="softmax")(outputs)
model_resnet = Model(inputs=resnet.input, outputs=outputs)
for layer in resnet.layers:
layer.trainable = False
model_resnet.compile(
loss='categorical_crossentropy',
optimizer='adam',
metrics=['accuracy']
)
model_resnet.summary()
Model: "functional_17"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_9 (InputLayer) [(None, 224, 224, 3) 0
__________________________________________________________________________________________________
conv1_pad (ZeroPadding2D) (None, 230, 230, 3) 0 input_9[0][0]
__________________________________________________________________________________________________
conv1_conv (Conv2D) (None, 112, 112, 64) 9472 conv1_pad[0][0]
__________________________________________________________________________________________________
conv1_bn (BatchNormalization) (None, 112, 112, 64) 256 conv1_conv[0][0]
__________________________________________________________________________________________________
conv1_relu (Activation) (None, 112, 112, 64) 0 conv1_bn[0][0]
__________________________________________________________________________________________________
pool1_pad (ZeroPadding2D) (None, 114, 114, 64) 0 conv1_relu[0][0]
__________________________________________________________________________________________________
pool1_pool (MaxPooling2D) (None, 56, 56, 64) 0 pool1_pad[0][0]
__________________________________________________________________________________________________
conv2_block1_1_conv (Conv2D) (None, 56, 56, 64) 4160 pool1_pool[0][0]
__________________________________________________________________________________________________
conv2_block1_1_bn (BatchNormali (None, 56, 56, 64) 256 conv2_block1_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block1_1_relu (Activation (None, 56, 56, 64) 0 conv2_block1_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block1_2_conv (Conv2D) (None, 56, 56, 64) 36928 conv2_block1_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block1_2_bn (BatchNormali (None, 56, 56, 64) 256 conv2_block1_2_conv[0][0]
__________________________________________________________________________________________________
conv2_block1_2_relu (Activation (None, 56, 56, 64) 0 conv2_block1_2_bn[0][0]
__________________________________________________________________________________________________
conv2_block1_0_conv (Conv2D) (None, 56, 56, 256) 16640 pool1_pool[0][0]
__________________________________________________________________________________________________
conv2_block1_3_conv (Conv2D) (None, 56, 56, 256) 16640 conv2_block1_2_relu[0][0]
__________________________________________________________________________________________________
conv2_block1_0_bn (BatchNormali (None, 56, 56, 256) 1024 conv2_block1_0_conv[0][0]
__________________________________________________________________________________________________
conv2_block1_3_bn (BatchNormali (None, 56, 56, 256) 1024 conv2_block1_3_conv[0][0]
__________________________________________________________________________________________________
conv2_block1_add (Add) (None, 56, 56, 256) 0 conv2_block1_0_bn[0][0]
conv2_block1_3_bn[0][0]
__________________________________________________________________________________________________
conv2_block1_out (Activation) (None, 56, 56, 256) 0 conv2_block1_add[0][0]
__________________________________________________________________________________________________
conv2_block2_1_conv (Conv2D) (None, 56, 56, 64) 16448 conv2_block1_out[0][0]
__________________________________________________________________________________________________
conv2_block2_1_bn (BatchNormali (None, 56, 56, 64) 256 conv2_block2_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block2_1_relu (Activation (None, 56, 56, 64) 0 conv2_block2_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block2_2_conv (Conv2D) (None, 56, 56, 64) 36928 conv2_block2_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block2_2_bn (BatchNormali (None, 56, 56, 64) 256 conv2_block2_2_conv[0][0]
__________________________________________________________________________________________________
conv2_block2_2_relu (Activation (None, 56, 56, 64) 0 conv2_block2_2_bn[0][0]
__________________________________________________________________________________________________
conv2_block2_3_conv (Conv2D) (None, 56, 56, 256) 16640 conv2_block2_2_relu[0][0]
__________________________________________________________________________________________________
conv2_block2_3_bn (BatchNormali (None, 56, 56, 256) 1024 conv2_block2_3_conv[0][0]
__________________________________________________________________________________________________
conv2_block2_add (Add) (None, 56, 56, 256) 0 conv2_block1_out[0][0]
conv2_block2_3_bn[0][0]
__________________________________________________________________________________________________
conv2_block2_out (Activation) (None, 56, 56, 256) 0 conv2_block2_add[0][0]
__________________________________________________________________________________________________
conv2_block3_1_conv (Conv2D) (None, 56, 56, 64) 16448 conv2_block2_out[0][0]
__________________________________________________________________________________________________
conv2_block3_1_bn (BatchNormali (None, 56, 56, 64) 256 conv2_block3_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block3_1_relu (Activation (None, 56, 56, 64) 0 conv2_block3_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block3_2_conv (Conv2D) (None, 56, 56, 64) 36928 conv2_block3_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block3_2_bn (BatchNormali (None, 56, 56, 64) 256 conv2_block3_2_conv[0][0]
__________________________________________________________________________________________________
conv2_block3_2_relu (Activation (None, 56, 56, 64) 0 conv2_block3_2_bn[0][0]
__________________________________________________________________________________________________
conv2_block3_3_conv (Conv2D) (None, 56, 56, 256) 16640 conv2_block3_2_relu[0][0]
__________________________________________________________________________________________________
conv2_block3_3_bn (BatchNormali (None, 56, 56, 256) 1024 conv2_block3_3_conv[0][0]
__________________________________________________________________________________________________
conv2_block3_add (Add) (None, 56, 56, 256) 0 conv2_block2_out[0][0]
conv2_block3_3_bn[0][0]
__________________________________________________________________________________________________
conv2_block3_out (Activation) (None, 56, 56, 256) 0 conv2_block3_add[0][0]
__________________________________________________________________________________________________
conv3_block1_1_conv (Conv2D) (None, 28, 28, 128) 32896 conv2_block3_out[0][0]
__________________________________________________________________________________________________
conv3_block1_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block1_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block1_1_relu (Activation (None, 28, 28, 128) 0 conv3_block1_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block1_2_conv (Conv2D) (None, 28, 28, 128) 147584 conv3_block1_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block1_2_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block1_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block1_2_relu (Activation (None, 28, 28, 128) 0 conv3_block1_2_bn[0][0]
__________________________________________________________________________________________________
conv3_block1_0_conv (Conv2D) (None, 28, 28, 512) 131584 conv2_block3_out[0][0]
__________________________________________________________________________________________________
conv3_block1_3_conv (Conv2D) (None, 28, 28, 512) 66048 conv3_block1_2_relu[0][0]
__________________________________________________________________________________________________
conv3_block1_0_bn (BatchNormali (None, 28, 28, 512) 2048 conv3_block1_0_conv[0][0]
__________________________________________________________________________________________________
conv3_block1_3_bn (BatchNormali (None, 28, 28, 512) 2048 conv3_block1_3_conv[0][0]
__________________________________________________________________________________________________
conv3_block1_add (Add) (None, 28, 28, 512) 0 conv3_block1_0_bn[0][0]
conv3_block1_3_bn[0][0]
__________________________________________________________________________________________________
conv3_block1_out (Activation) (None, 28, 28, 512) 0 conv3_block1_add[0][0]
__________________________________________________________________________________________________
conv3_block2_1_conv (Conv2D) (None, 28, 28, 128) 65664 conv3_block1_out[0][0]
__________________________________________________________________________________________________
conv3_block2_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block2_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block2_1_relu (Activation (None, 28, 28, 128) 0 conv3_block2_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block2_2_conv (Conv2D) (None, 28, 28, 128) 147584 conv3_block2_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block2_2_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block2_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block2_2_relu (Activation (None, 28, 28, 128) 0 conv3_block2_2_bn[0][0]
__________________________________________________________________________________________________
conv3_block2_3_conv (Conv2D) (None, 28, 28, 512) 66048 conv3_block2_2_relu[0][0]
__________________________________________________________________________________________________
conv3_block2_3_bn (BatchNormali (None, 28, 28, 512) 2048 conv3_block2_3_conv[0][0]
__________________________________________________________________________________________________
conv3_block2_add (Add) (None, 28, 28, 512) 0 conv3_block1_out[0][0]
conv3_block2_3_bn[0][0]
__________________________________________________________________________________________________
conv3_block2_out (Activation) (None, 28, 28, 512) 0 conv3_block2_add[0][0]
__________________________________________________________________________________________________
conv3_block3_1_conv (Conv2D) (None, 28, 28, 128) 65664 conv3_block2_out[0][0]
__________________________________________________________________________________________________
conv3_block3_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block3_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block3_1_relu (Activation (None, 28, 28, 128) 0 conv3_block3_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block3_2_conv (Conv2D) (None, 28, 28, 128) 147584 conv3_block3_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block3_2_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block3_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block3_2_relu (Activation (None, 28, 28, 128) 0 conv3_block3_2_bn[0][0]
__________________________________________________________________________________________________
conv3_block3_3_conv (Conv2D) (None, 28, 28, 512) 66048 conv3_block3_2_relu[0][0]
__________________________________________________________________________________________________
conv3_block3_3_bn (BatchNormali (None, 28, 28, 512) 2048 conv3_block3_3_conv[0][0]
__________________________________________________________________________________________________
conv3_block3_add (Add) (None, 28, 28, 512) 0 conv3_block2_out[0][0]
conv3_block3_3_bn[0][0]
__________________________________________________________________________________________________
conv3_block3_out (Activation) (None, 28, 28, 512) 0 conv3_block3_add[0][0]
__________________________________________________________________________________________________
conv3_block4_1_conv (Conv2D) (None, 28, 28, 128) 65664 conv3_block3_out[0][0]
__________________________________________________________________________________________________
conv3_block4_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block4_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block4_1_relu (Activation (None, 28, 28, 128) 0 conv3_block4_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block4_2_conv (Conv2D) (None, 28, 28, 128) 147584 conv3_block4_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block4_2_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block4_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block4_2_relu (Activation (None, 28, 28, 128) 0 conv3_block4_2_bn[0][0]
__________________________________________________________________________________________________
conv3_block4_3_conv (Conv2D) (None, 28, 28, 512) 66048 conv3_block4_2_relu[0][0]
__________________________________________________________________________________________________
conv3_block4_3_bn (BatchNormali (None, 28, 28, 512) 2048 conv3_block4_3_conv[0][0]
__________________________________________________________________________________________________
conv3_block4_add (Add) (None, 28, 28, 512) 0 conv3_block3_out[0][0]
conv3_block4_3_bn[0][0]
__________________________________________________________________________________________________
conv3_block4_out (Activation) (None, 28, 28, 512) 0 conv3_block4_add[0][0]
__________________________________________________________________________________________________
conv4_block1_1_conv (Conv2D) (None, 14, 14, 256) 131328 conv3_block4_out[0][0]
__________________________________________________________________________________________________
conv4_block1_1_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block1_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block1_1_relu (Activation (None, 14, 14, 256) 0 conv4_block1_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block1_2_conv (Conv2D) (None, 14, 14, 256) 590080 conv4_block1_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block1_2_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block1_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block1_2_relu (Activation (None, 14, 14, 256) 0 conv4_block1_2_bn[0][0]
__________________________________________________________________________________________________
conv4_block1_0_conv (Conv2D) (None, 14, 14, 1024) 525312 conv3_block4_out[0][0]
__________________________________________________________________________________________________
conv4_block1_3_conv (Conv2D) (None, 14, 14, 1024) 263168 conv4_block1_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block1_0_bn (BatchNormali (None, 14, 14, 1024) 4096 conv4_block1_0_conv[0][0]
__________________________________________________________________________________________________
conv4_block1_3_bn (BatchNormali (None, 14, 14, 1024) 4096 conv4_block1_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block1_add (Add) (None, 14, 14, 1024) 0 conv4_block1_0_bn[0][0]
conv4_block1_3_bn[0][0]
__________________________________________________________________________________________________
conv4_block1_out (Activation) (None, 14, 14, 1024) 0 conv4_block1_add[0][0]
__________________________________________________________________________________________________
conv4_block2_1_conv (Conv2D) (None, 14, 14, 256) 262400 conv4_block1_out[0][0]
__________________________________________________________________________________________________
conv4_block2_1_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block2_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block2_1_relu (Activation (None, 14, 14, 256) 0 conv4_block2_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block2_2_conv (Conv2D) (None, 14, 14, 256) 590080 conv4_block2_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block2_2_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block2_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block2_2_relu (Activation (None, 14, 14, 256) 0 conv4_block2_2_bn[0][0]
__________________________________________________________________________________________________
conv4_block2_3_conv (Conv2D) (None, 14, 14, 1024) 263168 conv4_block2_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block2_3_bn (BatchNormali (None, 14, 14, 1024) 4096 conv4_block2_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block2_add (Add) (None, 14, 14, 1024) 0 conv4_block1_out[0][0]
conv4_block2_3_bn[0][0]
__________________________________________________________________________________________________
conv4_block2_out (Activation) (None, 14, 14, 1024) 0 conv4_block2_add[0][0]
__________________________________________________________________________________________________
conv4_block3_1_conv (Conv2D) (None, 14, 14, 256) 262400 conv4_block2_out[0][0]
__________________________________________________________________________________________________
conv4_block3_1_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block3_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block3_1_relu (Activation (None, 14, 14, 256) 0 conv4_block3_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block3_2_conv (Conv2D) (None, 14, 14, 256) 590080 conv4_block3_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block3_2_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block3_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block3_2_relu (Activation (None, 14, 14, 256) 0 conv4_block3_2_bn[0][0]
__________________________________________________________________________________________________
conv4_block3_3_conv (Conv2D) (None, 14, 14, 1024) 263168 conv4_block3_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block3_3_bn (BatchNormali (None, 14, 14, 1024) 4096 conv4_block3_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block3_add (Add) (None, 14, 14, 1024) 0 conv4_block2_out[0][0]
conv4_block3_3_bn[0][0]
__________________________________________________________________________________________________
conv4_block3_out (Activation) (None, 14, 14, 1024) 0 conv4_block3_add[0][0]
__________________________________________________________________________________________________
conv4_block4_1_conv (Conv2D) (None, 14, 14, 256) 262400 conv4_block3_out[0][0]
__________________________________________________________________________________________________
conv4_block4_1_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block4_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block4_1_relu (Activation (None, 14, 14, 256) 0 conv4_block4_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block4_2_conv (Conv2D) (None, 14, 14, 256) 590080 conv4_block4_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block4_2_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block4_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block4_2_relu (Activation (None, 14, 14, 256) 0 conv4_block4_2_bn[0][0]
__________________________________________________________________________________________________
conv4_block4_3_conv (Conv2D) (None, 14, 14, 1024) 263168 conv4_block4_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block4_3_bn (BatchNormali (None, 14, 14, 1024) 4096 conv4_block4_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block4_add (Add) (None, 14, 14, 1024) 0 conv4_block3_out[0][0]
conv4_block4_3_bn[0][0]
__________________________________________________________________________________________________
conv4_block4_out (Activation) (None, 14, 14, 1024) 0 conv4_block4_add[0][0]
__________________________________________________________________________________________________
conv4_block5_1_conv (Conv2D) (None, 14, 14, 256) 262400 conv4_block4_out[0][0]
__________________________________________________________________________________________________
conv4_block5_1_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block5_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block5_1_relu (Activation (None, 14, 14, 256) 0 conv4_block5_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block5_2_conv (Conv2D) (None, 14, 14, 256) 590080 conv4_block5_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block5_2_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block5_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block5_2_relu (Activation (None, 14, 14, 256) 0 conv4_block5_2_bn[0][0]
__________________________________________________________________________________________________
conv4_block5_3_conv (Conv2D) (None, 14, 14, 1024) 263168 conv4_block5_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block5_3_bn (BatchNormali (None, 14, 14, 1024) 4096 conv4_block5_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block5_add (Add) (None, 14, 14, 1024) 0 conv4_block4_out[0][0]
conv4_block5_3_bn[0][0]
__________________________________________________________________________________________________
conv4_block5_out (Activation) (None, 14, 14, 1024) 0 conv4_block5_add[0][0]
__________________________________________________________________________________________________
conv4_block6_1_conv (Conv2D) (None, 14, 14, 256) 262400 conv4_block5_out[0][0]
__________________________________________________________________________________________________
conv4_block6_1_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block6_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block6_1_relu (Activation (None, 14, 14, 256) 0 conv4_block6_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block6_2_conv (Conv2D) (None, 14, 14, 256) 590080 conv4_block6_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block6_2_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block6_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block6_2_relu (Activation (None, 14, 14, 256) 0 conv4_block6_2_bn[0][0]
__________________________________________________________________________________________________
conv4_block6_3_conv (Conv2D) (None, 14, 14, 1024) 263168 conv4_block6_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block6_3_bn (BatchNormali (None, 14, 14, 1024) 4096 conv4_block6_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block6_add (Add) (None, 14, 14, 1024) 0 conv4_block5_out[0][0]
conv4_block6_3_bn[0][0]
__________________________________________________________________________________________________
conv4_block6_out (Activation) (None, 14, 14, 1024) 0 conv4_block6_add[0][0]
__________________________________________________________________________________________________
conv5_block1_1_conv (Conv2D) (None, 7, 7, 512) 524800 conv4_block6_out[0][0]
__________________________________________________________________________________________________
conv5_block1_1_bn (BatchNormali (None, 7, 7, 512) 2048 conv5_block1_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block1_1_relu (Activation (None, 7, 7, 512) 0 conv5_block1_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block1_2_conv (Conv2D) (None, 7, 7, 512) 2359808 conv5_block1_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block1_2_bn (BatchNormali (None, 7, 7, 512) 2048 conv5_block1_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block1_2_relu (Activation (None, 7, 7, 512) 0 conv5_block1_2_bn[0][0]
__________________________________________________________________________________________________
conv5_block1_0_conv (Conv2D) (None, 7, 7, 2048) 2099200 conv4_block6_out[0][0]
__________________________________________________________________________________________________
conv5_block1_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 conv5_block1_2_relu[0][0]
__________________________________________________________________________________________________
conv5_block1_0_bn (BatchNormali (None, 7, 7, 2048) 8192 conv5_block1_0_conv[0][0]
__________________________________________________________________________________________________
conv5_block1_3_bn (BatchNormali (None, 7, 7, 2048) 8192 conv5_block1_3_conv[0][0]
__________________________________________________________________________________________________
conv5_block1_add (Add) (None, 7, 7, 2048) 0 conv5_block1_0_bn[0][0]
conv5_block1_3_bn[0][0]
__________________________________________________________________________________________________
conv5_block1_out (Activation) (None, 7, 7, 2048) 0 conv5_block1_add[0][0]
__________________________________________________________________________________________________
conv5_block2_1_conv (Conv2D) (None, 7, 7, 512) 1049088 conv5_block1_out[0][0]
__________________________________________________________________________________________________
conv5_block2_1_bn (BatchNormali (None, 7, 7, 512) 2048 conv5_block2_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block2_1_relu (Activation (None, 7, 7, 512) 0 conv5_block2_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block2_2_conv (Conv2D) (None, 7, 7, 512) 2359808 conv5_block2_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block2_2_bn (BatchNormali (None, 7, 7, 512) 2048 conv5_block2_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block2_2_relu (Activation (None, 7, 7, 512) 0 conv5_block2_2_bn[0][0]
__________________________________________________________________________________________________
conv5_block2_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 conv5_block2_2_relu[0][0]
__________________________________________________________________________________________________
conv5_block2_3_bn (BatchNormali (None, 7, 7, 2048) 8192 conv5_block2_3_conv[0][0]
__________________________________________________________________________________________________
conv5_block2_add (Add) (None, 7, 7, 2048) 0 conv5_block1_out[0][0]
conv5_block2_3_bn[0][0]
__________________________________________________________________________________________________
conv5_block2_out (Activation) (None, 7, 7, 2048) 0 conv5_block2_add[0][0]
__________________________________________________________________________________________________
conv5_block3_1_conv (Conv2D) (None, 7, 7, 512) 1049088 conv5_block2_out[0][0]
__________________________________________________________________________________________________
conv5_block3_1_bn (BatchNormali (None, 7, 7, 512) 2048 conv5_block3_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block3_1_relu (Activation (None, 7, 7, 512) 0 conv5_block3_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block3_2_conv (Conv2D) (None, 7, 7, 512) 2359808 conv5_block3_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block3_2_bn (BatchNormali (None, 7, 7, 512) 2048 conv5_block3_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block3_2_relu (Activation (None, 7, 7, 512) 0 conv5_block3_2_bn[0][0]
__________________________________________________________________________________________________
conv5_block3_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 conv5_block3_2_relu[0][0]
__________________________________________________________________________________________________
conv5_block3_3_bn (BatchNormali (None, 7, 7, 2048) 8192 conv5_block3_3_conv[0][0]
__________________________________________________________________________________________________
conv5_block3_add (Add) (None, 7, 7, 2048) 0 conv5_block2_out[0][0]
conv5_block3_3_bn[0][0]
__________________________________________________________________________________________________
conv5_block3_out (Activation) (None, 7, 7, 2048) 0 conv5_block3_add[0][0]
__________________________________________________________________________________________________
flatten (Flatten) (None, 100352) 0 conv5_block3_out[0][0]
__________________________________________________________________________________________________
dropout_8 (Dropout) (None, 100352) 0 flatten[0][0]
__________________________________________________________________________________________________
dense_8 (Dense) (None, 5) 501765 dropout_8[0][0]
==================================================================================================
Total params: 24,089,477
Trainable params: 501,765
Non-trainable params: 23,587,712
__________________________________________________________________________________________________
model_resnet.fit_generator(generator=train_generator,
steps_per_epoch=1000,
validation_data=valid_generator,
validation_steps=1000,
epochs=10)
Epoch 1/10 1015/1015 [==============================] - 23s 22ms/step - loss: 0.0124 - accuracy: 0.9972 - val_loss: 0.0170 - val_accuracy: 0.9973 Epoch 2/10 1015/1015 [==============================] - 22s 22ms/step - loss: 0.0101 - accuracy: 0.9978 - val_loss: 0.0167 - val_accuracy: 0.9973 Epoch 3/10 1015/1015 [==============================] - 29s 29ms/step - loss: 0.0047 - accuracy: 0.9992 - val_loss: 0.0586 - val_accuracy: 0.9965 Epoch 4/10 1015/1015 [==============================] - 29s 28ms/step - loss: 0.0077 - accuracy: 0.9986 - val_loss: 0.0174 - val_accuracy: 0.9965 Epoch 5/10 1015/1015 [==============================] - 20s 19ms/step - loss: 0.0065 - accuracy: 0.9989 - val_loss: 0.0303 - val_accuracy: 0.9938 Epoch 6/10 1015/1015 [==============================] - 28s 27ms/step - loss: 0.0074 - accuracy: 0.9992 - val_loss: 0.0372 - val_accuracy: 0.9982 Epoch 7/10 1015/1015 [==============================] - 29s 28ms/step - loss: 0.0036 - accuracy: 0.9993 - val_loss: 0.0344 - val_accuracy: 0.9973 0.0036 - ac Epoch 8/10 1015/1015 [==============================] - 26s 26ms/step - loss: 0.0062 - accuracy: 0.9990 - val_loss: 0.0428 - val_accuracy: 0.9965 Epoch 9/10 1015/1015 [==============================] - 25s 24ms/step - loss: 0.0035 - accuracy: 0.9993 - val_loss: 0.0327 - val_accuracy: 0.9982 Epoch 10/10 1015/1015 [==============================] - 25s 24ms/step - loss: 0.0022 - accuracy: 0.9996 - val_loss: 0.0461 - val_accuracy: 0.9982
accr = model_resnet.evaluate(X_test,y_test)
151/151 [==============================] - 1s 6ms/step - loss: 0.0397 - accuracy: 0.9967
y_pred = model_resnet.predict(X_test)
y_pred=y_pred.argmax(axis=1)
acc1=accr[1]
print('Test set\n Accuracy: {:0.5f}'.format(accr[1]))
Test set Accuracy: 0.99669
print('\n')
print("Precision, Recall, F1")
print('\n')
labels =['No DR', 'Moderate', 'Mild', 'Proliferative DR', 'Severe']
CR=classification_report(y_test, y_pred, target_names=labels)
print(CR)
print('\n')
Precision, Recall, F1
precision recall f1-score support
No DR 1.00 1.00 1.00 297
Moderate 0.99 1.00 0.99 694
Mild 1.00 0.99 1.00 1664
Proliferative DR 1.00 0.99 1.00 960
Severe 1.00 1.00 1.00 1215
accuracy 1.00 4830
macro avg 1.00 1.00 1.00 4830
weighted avg 1.00 1.00 1.00 4830
CM=confusion_matrix(y_test, y_pred)
fig, ax = plot_confusion_matrix(conf_mat=CM,figsize=(10, 10),
show_absolute=True,
show_normed=True,
colorbar=False)
ax.set_xticklabels([''] + labels)
ax.set_yticklabels([''] + labels)
plt.show()
densenet = DenseNet121(weights="imagenet", include_top=False,
input_tensor=Input(shape=(224, 224, 3)))
outputs = densenet.output
outputs = Flatten(name="flatten")(outputs)
outputs = Dropout(0.5)(outputs)
outputs = Dense(5, activation="softmax")(outputs)
model_densenet = Model(inputs=densenet.input, outputs=outputs)
for layer in resnet.layers:
layer.trainable = False
model_densenet.compile(
loss='categorical_crossentropy',
optimizer='adam',
metrics=['accuracy']
)
model_densenet.summary()
Model: "functional_19"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_10 (InputLayer) [(None, 224, 224, 3) 0
__________________________________________________________________________________________________
zero_padding2d_4 (ZeroPadding2D (None, 230, 230, 3) 0 input_10[0][0]
__________________________________________________________________________________________________
conv1/conv (Conv2D) (None, 112, 112, 64) 9408 zero_padding2d_4[0][0]
__________________________________________________________________________________________________
conv1/bn (BatchNormalization) (None, 112, 112, 64) 256 conv1/conv[0][0]
__________________________________________________________________________________________________
conv1/relu (Activation) (None, 112, 112, 64) 0 conv1/bn[0][0]
__________________________________________________________________________________________________
zero_padding2d_5 (ZeroPadding2D (None, 114, 114, 64) 0 conv1/relu[0][0]
__________________________________________________________________________________________________
pool1 (MaxPooling2D) (None, 56, 56, 64) 0 zero_padding2d_5[0][0]
__________________________________________________________________________________________________
conv2_block1_0_bn (BatchNormali (None, 56, 56, 64) 256 pool1[0][0]
__________________________________________________________________________________________________
conv2_block1_0_relu (Activation (None, 56, 56, 64) 0 conv2_block1_0_bn[0][0]
__________________________________________________________________________________________________
conv2_block1_1_conv (Conv2D) (None, 56, 56, 128) 8192 conv2_block1_0_relu[0][0]
__________________________________________________________________________________________________
conv2_block1_1_bn (BatchNormali (None, 56, 56, 128) 512 conv2_block1_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block1_1_relu (Activation (None, 56, 56, 128) 0 conv2_block1_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block1_2_conv (Conv2D) (None, 56, 56, 32) 36864 conv2_block1_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block1_concat (Concatenat (None, 56, 56, 96) 0 pool1[0][0]
conv2_block1_2_conv[0][0]
__________________________________________________________________________________________________
conv2_block2_0_bn (BatchNormali (None, 56, 56, 96) 384 conv2_block1_concat[0][0]
__________________________________________________________________________________________________
conv2_block2_0_relu (Activation (None, 56, 56, 96) 0 conv2_block2_0_bn[0][0]
__________________________________________________________________________________________________
conv2_block2_1_conv (Conv2D) (None, 56, 56, 128) 12288 conv2_block2_0_relu[0][0]
__________________________________________________________________________________________________
conv2_block2_1_bn (BatchNormali (None, 56, 56, 128) 512 conv2_block2_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block2_1_relu (Activation (None, 56, 56, 128) 0 conv2_block2_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block2_2_conv (Conv2D) (None, 56, 56, 32) 36864 conv2_block2_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block2_concat (Concatenat (None, 56, 56, 128) 0 conv2_block1_concat[0][0]
conv2_block2_2_conv[0][0]
__________________________________________________________________________________________________
conv2_block3_0_bn (BatchNormali (None, 56, 56, 128) 512 conv2_block2_concat[0][0]
__________________________________________________________________________________________________
conv2_block3_0_relu (Activation (None, 56, 56, 128) 0 conv2_block3_0_bn[0][0]
__________________________________________________________________________________________________
conv2_block3_1_conv (Conv2D) (None, 56, 56, 128) 16384 conv2_block3_0_relu[0][0]
__________________________________________________________________________________________________
conv2_block3_1_bn (BatchNormali (None, 56, 56, 128) 512 conv2_block3_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block3_1_relu (Activation (None, 56, 56, 128) 0 conv2_block3_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block3_2_conv (Conv2D) (None, 56, 56, 32) 36864 conv2_block3_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block3_concat (Concatenat (None, 56, 56, 160) 0 conv2_block2_concat[0][0]
conv2_block3_2_conv[0][0]
__________________________________________________________________________________________________
conv2_block4_0_bn (BatchNormali (None, 56, 56, 160) 640 conv2_block3_concat[0][0]
__________________________________________________________________________________________________
conv2_block4_0_relu (Activation (None, 56, 56, 160) 0 conv2_block4_0_bn[0][0]
__________________________________________________________________________________________________
conv2_block4_1_conv (Conv2D) (None, 56, 56, 128) 20480 conv2_block4_0_relu[0][0]
__________________________________________________________________________________________________
conv2_block4_1_bn (BatchNormali (None, 56, 56, 128) 512 conv2_block4_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block4_1_relu (Activation (None, 56, 56, 128) 0 conv2_block4_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block4_2_conv (Conv2D) (None, 56, 56, 32) 36864 conv2_block4_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block4_concat (Concatenat (None, 56, 56, 192) 0 conv2_block3_concat[0][0]
conv2_block4_2_conv[0][0]
__________________________________________________________________________________________________
conv2_block5_0_bn (BatchNormali (None, 56, 56, 192) 768 conv2_block4_concat[0][0]
__________________________________________________________________________________________________
conv2_block5_0_relu (Activation (None, 56, 56, 192) 0 conv2_block5_0_bn[0][0]
__________________________________________________________________________________________________
conv2_block5_1_conv (Conv2D) (None, 56, 56, 128) 24576 conv2_block5_0_relu[0][0]
__________________________________________________________________________________________________
conv2_block5_1_bn (BatchNormali (None, 56, 56, 128) 512 conv2_block5_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block5_1_relu (Activation (None, 56, 56, 128) 0 conv2_block5_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block5_2_conv (Conv2D) (None, 56, 56, 32) 36864 conv2_block5_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block5_concat (Concatenat (None, 56, 56, 224) 0 conv2_block4_concat[0][0]
conv2_block5_2_conv[0][0]
__________________________________________________________________________________________________
conv2_block6_0_bn (BatchNormali (None, 56, 56, 224) 896 conv2_block5_concat[0][0]
__________________________________________________________________________________________________
conv2_block6_0_relu (Activation (None, 56, 56, 224) 0 conv2_block6_0_bn[0][0]
__________________________________________________________________________________________________
conv2_block6_1_conv (Conv2D) (None, 56, 56, 128) 28672 conv2_block6_0_relu[0][0]
__________________________________________________________________________________________________
conv2_block6_1_bn (BatchNormali (None, 56, 56, 128) 512 conv2_block6_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block6_1_relu (Activation (None, 56, 56, 128) 0 conv2_block6_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block6_2_conv (Conv2D) (None, 56, 56, 32) 36864 conv2_block6_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block6_concat (Concatenat (None, 56, 56, 256) 0 conv2_block5_concat[0][0]
conv2_block6_2_conv[0][0]
__________________________________________________________________________________________________
pool2_bn (BatchNormalization) (None, 56, 56, 256) 1024 conv2_block6_concat[0][0]
__________________________________________________________________________________________________
pool2_relu (Activation) (None, 56, 56, 256) 0 pool2_bn[0][0]
__________________________________________________________________________________________________
pool2_conv (Conv2D) (None, 56, 56, 128) 32768 pool2_relu[0][0]
__________________________________________________________________________________________________
pool2_pool (AveragePooling2D) (None, 28, 28, 128) 0 pool2_conv[0][0]
__________________________________________________________________________________________________
conv3_block1_0_bn (BatchNormali (None, 28, 28, 128) 512 pool2_pool[0][0]
__________________________________________________________________________________________________
conv3_block1_0_relu (Activation (None, 28, 28, 128) 0 conv3_block1_0_bn[0][0]
__________________________________________________________________________________________________
conv3_block1_1_conv (Conv2D) (None, 28, 28, 128) 16384 conv3_block1_0_relu[0][0]
__________________________________________________________________________________________________
conv3_block1_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block1_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block1_1_relu (Activation (None, 28, 28, 128) 0 conv3_block1_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block1_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block1_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block1_concat (Concatenat (None, 28, 28, 160) 0 pool2_pool[0][0]
conv3_block1_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block2_0_bn (BatchNormali (None, 28, 28, 160) 640 conv3_block1_concat[0][0]
__________________________________________________________________________________________________
conv3_block2_0_relu (Activation (None, 28, 28, 160) 0 conv3_block2_0_bn[0][0]
__________________________________________________________________________________________________
conv3_block2_1_conv (Conv2D) (None, 28, 28, 128) 20480 conv3_block2_0_relu[0][0]
__________________________________________________________________________________________________
conv3_block2_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block2_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block2_1_relu (Activation (None, 28, 28, 128) 0 conv3_block2_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block2_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block2_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block2_concat (Concatenat (None, 28, 28, 192) 0 conv3_block1_concat[0][0]
conv3_block2_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block3_0_bn (BatchNormali (None, 28, 28, 192) 768 conv3_block2_concat[0][0]
__________________________________________________________________________________________________
conv3_block3_0_relu (Activation (None, 28, 28, 192) 0 conv3_block3_0_bn[0][0]
__________________________________________________________________________________________________
conv3_block3_1_conv (Conv2D) (None, 28, 28, 128) 24576 conv3_block3_0_relu[0][0]
__________________________________________________________________________________________________
conv3_block3_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block3_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block3_1_relu (Activation (None, 28, 28, 128) 0 conv3_block3_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block3_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block3_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block3_concat (Concatenat (None, 28, 28, 224) 0 conv3_block2_concat[0][0]
conv3_block3_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block4_0_bn (BatchNormali (None, 28, 28, 224) 896 conv3_block3_concat[0][0]
__________________________________________________________________________________________________
conv3_block4_0_relu (Activation (None, 28, 28, 224) 0 conv3_block4_0_bn[0][0]
__________________________________________________________________________________________________
conv3_block4_1_conv (Conv2D) (None, 28, 28, 128) 28672 conv3_block4_0_relu[0][0]
__________________________________________________________________________________________________
conv3_block4_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block4_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block4_1_relu (Activation (None, 28, 28, 128) 0 conv3_block4_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block4_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block4_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block4_concat (Concatenat (None, 28, 28, 256) 0 conv3_block3_concat[0][0]
conv3_block4_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block5_0_bn (BatchNormali (None, 28, 28, 256) 1024 conv3_block4_concat[0][0]
__________________________________________________________________________________________________
conv3_block5_0_relu (Activation (None, 28, 28, 256) 0 conv3_block5_0_bn[0][0]
__________________________________________________________________________________________________
conv3_block5_1_conv (Conv2D) (None, 28, 28, 128) 32768 conv3_block5_0_relu[0][0]
__________________________________________________________________________________________________
conv3_block5_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block5_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block5_1_relu (Activation (None, 28, 28, 128) 0 conv3_block5_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block5_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block5_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block5_concat (Concatenat (None, 28, 28, 288) 0 conv3_block4_concat[0][0]
conv3_block5_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block6_0_bn (BatchNormali (None, 28, 28, 288) 1152 conv3_block5_concat[0][0]
__________________________________________________________________________________________________
conv3_block6_0_relu (Activation (None, 28, 28, 288) 0 conv3_block6_0_bn[0][0]
__________________________________________________________________________________________________
conv3_block6_1_conv (Conv2D) (None, 28, 28, 128) 36864 conv3_block6_0_relu[0][0]
__________________________________________________________________________________________________
conv3_block6_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block6_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block6_1_relu (Activation (None, 28, 28, 128) 0 conv3_block6_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block6_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block6_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block6_concat (Concatenat (None, 28, 28, 320) 0 conv3_block5_concat[0][0]
conv3_block6_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block7_0_bn (BatchNormali (None, 28, 28, 320) 1280 conv3_block6_concat[0][0]
__________________________________________________________________________________________________
conv3_block7_0_relu (Activation (None, 28, 28, 320) 0 conv3_block7_0_bn[0][0]
__________________________________________________________________________________________________
conv3_block7_1_conv (Conv2D) (None, 28, 28, 128) 40960 conv3_block7_0_relu[0][0]
__________________________________________________________________________________________________
conv3_block7_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block7_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block7_1_relu (Activation (None, 28, 28, 128) 0 conv3_block7_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block7_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block7_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block7_concat (Concatenat (None, 28, 28, 352) 0 conv3_block6_concat[0][0]
conv3_block7_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block8_0_bn (BatchNormali (None, 28, 28, 352) 1408 conv3_block7_concat[0][0]
__________________________________________________________________________________________________
conv3_block8_0_relu (Activation (None, 28, 28, 352) 0 conv3_block8_0_bn[0][0]
__________________________________________________________________________________________________
conv3_block8_1_conv (Conv2D) (None, 28, 28, 128) 45056 conv3_block8_0_relu[0][0]
__________________________________________________________________________________________________
conv3_block8_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block8_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block8_1_relu (Activation (None, 28, 28, 128) 0 conv3_block8_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block8_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block8_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block8_concat (Concatenat (None, 28, 28, 384) 0 conv3_block7_concat[0][0]
conv3_block8_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block9_0_bn (BatchNormali (None, 28, 28, 384) 1536 conv3_block8_concat[0][0]
__________________________________________________________________________________________________
conv3_block9_0_relu (Activation (None, 28, 28, 384) 0 conv3_block9_0_bn[0][0]
__________________________________________________________________________________________________
conv3_block9_1_conv (Conv2D) (None, 28, 28, 128) 49152 conv3_block9_0_relu[0][0]
__________________________________________________________________________________________________
conv3_block9_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block9_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block9_1_relu (Activation (None, 28, 28, 128) 0 conv3_block9_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block9_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block9_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block9_concat (Concatenat (None, 28, 28, 416) 0 conv3_block8_concat[0][0]
conv3_block9_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block10_0_bn (BatchNormal (None, 28, 28, 416) 1664 conv3_block9_concat[0][0]
__________________________________________________________________________________________________
conv3_block10_0_relu (Activatio (None, 28, 28, 416) 0 conv3_block10_0_bn[0][0]
__________________________________________________________________________________________________
conv3_block10_1_conv (Conv2D) (None, 28, 28, 128) 53248 conv3_block10_0_relu[0][0]
__________________________________________________________________________________________________
conv3_block10_1_bn (BatchNormal (None, 28, 28, 128) 512 conv3_block10_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block10_1_relu (Activatio (None, 28, 28, 128) 0 conv3_block10_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block10_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block10_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block10_concat (Concatena (None, 28, 28, 448) 0 conv3_block9_concat[0][0]
conv3_block10_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block11_0_bn (BatchNormal (None, 28, 28, 448) 1792 conv3_block10_concat[0][0]
__________________________________________________________________________________________________
conv3_block11_0_relu (Activatio (None, 28, 28, 448) 0 conv3_block11_0_bn[0][0]
__________________________________________________________________________________________________
conv3_block11_1_conv (Conv2D) (None, 28, 28, 128) 57344 conv3_block11_0_relu[0][0]
__________________________________________________________________________________________________
conv3_block11_1_bn (BatchNormal (None, 28, 28, 128) 512 conv3_block11_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block11_1_relu (Activatio (None, 28, 28, 128) 0 conv3_block11_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block11_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block11_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block11_concat (Concatena (None, 28, 28, 480) 0 conv3_block10_concat[0][0]
conv3_block11_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block12_0_bn (BatchNormal (None, 28, 28, 480) 1920 conv3_block11_concat[0][0]
__________________________________________________________________________________________________
conv3_block12_0_relu (Activatio (None, 28, 28, 480) 0 conv3_block12_0_bn[0][0]
__________________________________________________________________________________________________
conv3_block12_1_conv (Conv2D) (None, 28, 28, 128) 61440 conv3_block12_0_relu[0][0]
__________________________________________________________________________________________________
conv3_block12_1_bn (BatchNormal (None, 28, 28, 128) 512 conv3_block12_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block12_1_relu (Activatio (None, 28, 28, 128) 0 conv3_block12_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block12_2_conv (Conv2D) (None, 28, 28, 32) 36864 conv3_block12_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block12_concat (Concatena (None, 28, 28, 512) 0 conv3_block11_concat[0][0]
conv3_block12_2_conv[0][0]
__________________________________________________________________________________________________
pool3_bn (BatchNormalization) (None, 28, 28, 512) 2048 conv3_block12_concat[0][0]
__________________________________________________________________________________________________
pool3_relu (Activation) (None, 28, 28, 512) 0 pool3_bn[0][0]
__________________________________________________________________________________________________
pool3_conv (Conv2D) (None, 28, 28, 256) 131072 pool3_relu[0][0]
__________________________________________________________________________________________________
pool3_pool (AveragePooling2D) (None, 14, 14, 256) 0 pool3_conv[0][0]
__________________________________________________________________________________________________
conv4_block1_0_bn (BatchNormali (None, 14, 14, 256) 1024 pool3_pool[0][0]
__________________________________________________________________________________________________
conv4_block1_0_relu (Activation (None, 14, 14, 256) 0 conv4_block1_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block1_1_conv (Conv2D) (None, 14, 14, 128) 32768 conv4_block1_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block1_1_bn (BatchNormali (None, 14, 14, 128) 512 conv4_block1_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block1_1_relu (Activation (None, 14, 14, 128) 0 conv4_block1_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block1_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block1_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block1_concat (Concatenat (None, 14, 14, 288) 0 pool3_pool[0][0]
conv4_block1_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block2_0_bn (BatchNormali (None, 14, 14, 288) 1152 conv4_block1_concat[0][0]
__________________________________________________________________________________________________
conv4_block2_0_relu (Activation (None, 14, 14, 288) 0 conv4_block2_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block2_1_conv (Conv2D) (None, 14, 14, 128) 36864 conv4_block2_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block2_1_bn (BatchNormali (None, 14, 14, 128) 512 conv4_block2_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block2_1_relu (Activation (None, 14, 14, 128) 0 conv4_block2_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block2_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block2_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block2_concat (Concatenat (None, 14, 14, 320) 0 conv4_block1_concat[0][0]
conv4_block2_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block3_0_bn (BatchNormali (None, 14, 14, 320) 1280 conv4_block2_concat[0][0]
__________________________________________________________________________________________________
conv4_block3_0_relu (Activation (None, 14, 14, 320) 0 conv4_block3_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block3_1_conv (Conv2D) (None, 14, 14, 128) 40960 conv4_block3_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block3_1_bn (BatchNormali (None, 14, 14, 128) 512 conv4_block3_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block3_1_relu (Activation (None, 14, 14, 128) 0 conv4_block3_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block3_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block3_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block3_concat (Concatenat (None, 14, 14, 352) 0 conv4_block2_concat[0][0]
conv4_block3_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block4_0_bn (BatchNormali (None, 14, 14, 352) 1408 conv4_block3_concat[0][0]
__________________________________________________________________________________________________
conv4_block4_0_relu (Activation (None, 14, 14, 352) 0 conv4_block4_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block4_1_conv (Conv2D) (None, 14, 14, 128) 45056 conv4_block4_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block4_1_bn (BatchNormali (None, 14, 14, 128) 512 conv4_block4_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block4_1_relu (Activation (None, 14, 14, 128) 0 conv4_block4_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block4_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block4_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block4_concat (Concatenat (None, 14, 14, 384) 0 conv4_block3_concat[0][0]
conv4_block4_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block5_0_bn (BatchNormali (None, 14, 14, 384) 1536 conv4_block4_concat[0][0]
__________________________________________________________________________________________________
conv4_block5_0_relu (Activation (None, 14, 14, 384) 0 conv4_block5_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block5_1_conv (Conv2D) (None, 14, 14, 128) 49152 conv4_block5_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block5_1_bn (BatchNormali (None, 14, 14, 128) 512 conv4_block5_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block5_1_relu (Activation (None, 14, 14, 128) 0 conv4_block5_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block5_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block5_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block5_concat (Concatenat (None, 14, 14, 416) 0 conv4_block4_concat[0][0]
conv4_block5_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block6_0_bn (BatchNormali (None, 14, 14, 416) 1664 conv4_block5_concat[0][0]
__________________________________________________________________________________________________
conv4_block6_0_relu (Activation (None, 14, 14, 416) 0 conv4_block6_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block6_1_conv (Conv2D) (None, 14, 14, 128) 53248 conv4_block6_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block6_1_bn (BatchNormali (None, 14, 14, 128) 512 conv4_block6_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block6_1_relu (Activation (None, 14, 14, 128) 0 conv4_block6_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block6_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block6_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block6_concat (Concatenat (None, 14, 14, 448) 0 conv4_block5_concat[0][0]
conv4_block6_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block7_0_bn (BatchNormali (None, 14, 14, 448) 1792 conv4_block6_concat[0][0]
__________________________________________________________________________________________________
conv4_block7_0_relu (Activation (None, 14, 14, 448) 0 conv4_block7_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block7_1_conv (Conv2D) (None, 14, 14, 128) 57344 conv4_block7_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block7_1_bn (BatchNormali (None, 14, 14, 128) 512 conv4_block7_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block7_1_relu (Activation (None, 14, 14, 128) 0 conv4_block7_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block7_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block7_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block7_concat (Concatenat (None, 14, 14, 480) 0 conv4_block6_concat[0][0]
conv4_block7_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block8_0_bn (BatchNormali (None, 14, 14, 480) 1920 conv4_block7_concat[0][0]
__________________________________________________________________________________________________
conv4_block8_0_relu (Activation (None, 14, 14, 480) 0 conv4_block8_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block8_1_conv (Conv2D) (None, 14, 14, 128) 61440 conv4_block8_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block8_1_bn (BatchNormali (None, 14, 14, 128) 512 conv4_block8_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block8_1_relu (Activation (None, 14, 14, 128) 0 conv4_block8_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block8_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block8_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block8_concat (Concatenat (None, 14, 14, 512) 0 conv4_block7_concat[0][0]
conv4_block8_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block9_0_bn (BatchNormali (None, 14, 14, 512) 2048 conv4_block8_concat[0][0]
__________________________________________________________________________________________________
conv4_block9_0_relu (Activation (None, 14, 14, 512) 0 conv4_block9_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block9_1_conv (Conv2D) (None, 14, 14, 128) 65536 conv4_block9_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block9_1_bn (BatchNormali (None, 14, 14, 128) 512 conv4_block9_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block9_1_relu (Activation (None, 14, 14, 128) 0 conv4_block9_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block9_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block9_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block9_concat (Concatenat (None, 14, 14, 544) 0 conv4_block8_concat[0][0]
conv4_block9_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block10_0_bn (BatchNormal (None, 14, 14, 544) 2176 conv4_block9_concat[0][0]
__________________________________________________________________________________________________
conv4_block10_0_relu (Activatio (None, 14, 14, 544) 0 conv4_block10_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block10_1_conv (Conv2D) (None, 14, 14, 128) 69632 conv4_block10_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block10_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block10_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block10_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block10_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block10_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block10_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block10_concat (Concatena (None, 14, 14, 576) 0 conv4_block9_concat[0][0]
conv4_block10_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block11_0_bn (BatchNormal (None, 14, 14, 576) 2304 conv4_block10_concat[0][0]
__________________________________________________________________________________________________
conv4_block11_0_relu (Activatio (None, 14, 14, 576) 0 conv4_block11_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block11_1_conv (Conv2D) (None, 14, 14, 128) 73728 conv4_block11_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block11_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block11_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block11_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block11_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block11_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block11_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block11_concat (Concatena (None, 14, 14, 608) 0 conv4_block10_concat[0][0]
conv4_block11_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block12_0_bn (BatchNormal (None, 14, 14, 608) 2432 conv4_block11_concat[0][0]
__________________________________________________________________________________________________
conv4_block12_0_relu (Activatio (None, 14, 14, 608) 0 conv4_block12_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block12_1_conv (Conv2D) (None, 14, 14, 128) 77824 conv4_block12_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block12_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block12_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block12_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block12_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block12_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block12_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block12_concat (Concatena (None, 14, 14, 640) 0 conv4_block11_concat[0][0]
conv4_block12_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block13_0_bn (BatchNormal (None, 14, 14, 640) 2560 conv4_block12_concat[0][0]
__________________________________________________________________________________________________
conv4_block13_0_relu (Activatio (None, 14, 14, 640) 0 conv4_block13_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block13_1_conv (Conv2D) (None, 14, 14, 128) 81920 conv4_block13_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block13_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block13_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block13_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block13_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block13_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block13_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block13_concat (Concatena (None, 14, 14, 672) 0 conv4_block12_concat[0][0]
conv4_block13_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block14_0_bn (BatchNormal (None, 14, 14, 672) 2688 conv4_block13_concat[0][0]
__________________________________________________________________________________________________
conv4_block14_0_relu (Activatio (None, 14, 14, 672) 0 conv4_block14_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block14_1_conv (Conv2D) (None, 14, 14, 128) 86016 conv4_block14_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block14_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block14_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block14_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block14_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block14_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block14_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block14_concat (Concatena (None, 14, 14, 704) 0 conv4_block13_concat[0][0]
conv4_block14_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block15_0_bn (BatchNormal (None, 14, 14, 704) 2816 conv4_block14_concat[0][0]
__________________________________________________________________________________________________
conv4_block15_0_relu (Activatio (None, 14, 14, 704) 0 conv4_block15_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block15_1_conv (Conv2D) (None, 14, 14, 128) 90112 conv4_block15_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block15_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block15_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block15_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block15_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block15_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block15_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block15_concat (Concatena (None, 14, 14, 736) 0 conv4_block14_concat[0][0]
conv4_block15_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block16_0_bn (BatchNormal (None, 14, 14, 736) 2944 conv4_block15_concat[0][0]
__________________________________________________________________________________________________
conv4_block16_0_relu (Activatio (None, 14, 14, 736) 0 conv4_block16_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block16_1_conv (Conv2D) (None, 14, 14, 128) 94208 conv4_block16_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block16_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block16_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block16_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block16_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block16_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block16_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block16_concat (Concatena (None, 14, 14, 768) 0 conv4_block15_concat[0][0]
conv4_block16_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block17_0_bn (BatchNormal (None, 14, 14, 768) 3072 conv4_block16_concat[0][0]
__________________________________________________________________________________________________
conv4_block17_0_relu (Activatio (None, 14, 14, 768) 0 conv4_block17_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block17_1_conv (Conv2D) (None, 14, 14, 128) 98304 conv4_block17_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block17_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block17_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block17_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block17_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block17_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block17_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block17_concat (Concatena (None, 14, 14, 800) 0 conv4_block16_concat[0][0]
conv4_block17_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block18_0_bn (BatchNormal (None, 14, 14, 800) 3200 conv4_block17_concat[0][0]
__________________________________________________________________________________________________
conv4_block18_0_relu (Activatio (None, 14, 14, 800) 0 conv4_block18_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block18_1_conv (Conv2D) (None, 14, 14, 128) 102400 conv4_block18_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block18_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block18_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block18_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block18_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block18_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block18_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block18_concat (Concatena (None, 14, 14, 832) 0 conv4_block17_concat[0][0]
conv4_block18_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block19_0_bn (BatchNormal (None, 14, 14, 832) 3328 conv4_block18_concat[0][0]
__________________________________________________________________________________________________
conv4_block19_0_relu (Activatio (None, 14, 14, 832) 0 conv4_block19_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block19_1_conv (Conv2D) (None, 14, 14, 128) 106496 conv4_block19_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block19_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block19_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block19_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block19_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block19_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block19_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block19_concat (Concatena (None, 14, 14, 864) 0 conv4_block18_concat[0][0]
conv4_block19_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block20_0_bn (BatchNormal (None, 14, 14, 864) 3456 conv4_block19_concat[0][0]
__________________________________________________________________________________________________
conv4_block20_0_relu (Activatio (None, 14, 14, 864) 0 conv4_block20_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block20_1_conv (Conv2D) (None, 14, 14, 128) 110592 conv4_block20_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block20_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block20_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block20_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block20_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block20_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block20_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block20_concat (Concatena (None, 14, 14, 896) 0 conv4_block19_concat[0][0]
conv4_block20_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block21_0_bn (BatchNormal (None, 14, 14, 896) 3584 conv4_block20_concat[0][0]
__________________________________________________________________________________________________
conv4_block21_0_relu (Activatio (None, 14, 14, 896) 0 conv4_block21_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block21_1_conv (Conv2D) (None, 14, 14, 128) 114688 conv4_block21_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block21_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block21_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block21_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block21_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block21_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block21_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block21_concat (Concatena (None, 14, 14, 928) 0 conv4_block20_concat[0][0]
conv4_block21_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block22_0_bn (BatchNormal (None, 14, 14, 928) 3712 conv4_block21_concat[0][0]
__________________________________________________________________________________________________
conv4_block22_0_relu (Activatio (None, 14, 14, 928) 0 conv4_block22_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block22_1_conv (Conv2D) (None, 14, 14, 128) 118784 conv4_block22_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block22_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block22_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block22_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block22_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block22_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block22_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block22_concat (Concatena (None, 14, 14, 960) 0 conv4_block21_concat[0][0]
conv4_block22_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block23_0_bn (BatchNormal (None, 14, 14, 960) 3840 conv4_block22_concat[0][0]
__________________________________________________________________________________________________
conv4_block23_0_relu (Activatio (None, 14, 14, 960) 0 conv4_block23_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block23_1_conv (Conv2D) (None, 14, 14, 128) 122880 conv4_block23_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block23_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block23_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block23_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block23_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block23_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block23_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block23_concat (Concatena (None, 14, 14, 992) 0 conv4_block22_concat[0][0]
conv4_block23_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block24_0_bn (BatchNormal (None, 14, 14, 992) 3968 conv4_block23_concat[0][0]
__________________________________________________________________________________________________
conv4_block24_0_relu (Activatio (None, 14, 14, 992) 0 conv4_block24_0_bn[0][0]
__________________________________________________________________________________________________
conv4_block24_1_conv (Conv2D) (None, 14, 14, 128) 126976 conv4_block24_0_relu[0][0]
__________________________________________________________________________________________________
conv4_block24_1_bn (BatchNormal (None, 14, 14, 128) 512 conv4_block24_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block24_1_relu (Activatio (None, 14, 14, 128) 0 conv4_block24_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block24_2_conv (Conv2D) (None, 14, 14, 32) 36864 conv4_block24_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block24_concat (Concatena (None, 14, 14, 1024) 0 conv4_block23_concat[0][0]
conv4_block24_2_conv[0][0]
__________________________________________________________________________________________________
pool4_bn (BatchNormalization) (None, 14, 14, 1024) 4096 conv4_block24_concat[0][0]
__________________________________________________________________________________________________
pool4_relu (Activation) (None, 14, 14, 1024) 0 pool4_bn[0][0]
__________________________________________________________________________________________________
pool4_conv (Conv2D) (None, 14, 14, 512) 524288 pool4_relu[0][0]
__________________________________________________________________________________________________
pool4_pool (AveragePooling2D) (None, 7, 7, 512) 0 pool4_conv[0][0]
__________________________________________________________________________________________________
conv5_block1_0_bn (BatchNormali (None, 7, 7, 512) 2048 pool4_pool[0][0]
__________________________________________________________________________________________________
conv5_block1_0_relu (Activation (None, 7, 7, 512) 0 conv5_block1_0_bn[0][0]
__________________________________________________________________________________________________
conv5_block1_1_conv (Conv2D) (None, 7, 7, 128) 65536 conv5_block1_0_relu[0][0]
__________________________________________________________________________________________________
conv5_block1_1_bn (BatchNormali (None, 7, 7, 128) 512 conv5_block1_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block1_1_relu (Activation (None, 7, 7, 128) 0 conv5_block1_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block1_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block1_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block1_concat (Concatenat (None, 7, 7, 544) 0 pool4_pool[0][0]
conv5_block1_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block2_0_bn (BatchNormali (None, 7, 7, 544) 2176 conv5_block1_concat[0][0]
__________________________________________________________________________________________________
conv5_block2_0_relu (Activation (None, 7, 7, 544) 0 conv5_block2_0_bn[0][0]
__________________________________________________________________________________________________
conv5_block2_1_conv (Conv2D) (None, 7, 7, 128) 69632 conv5_block2_0_relu[0][0]
__________________________________________________________________________________________________
conv5_block2_1_bn (BatchNormali (None, 7, 7, 128) 512 conv5_block2_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block2_1_relu (Activation (None, 7, 7, 128) 0 conv5_block2_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block2_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block2_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block2_concat (Concatenat (None, 7, 7, 576) 0 conv5_block1_concat[0][0]
conv5_block2_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block3_0_bn (BatchNormali (None, 7, 7, 576) 2304 conv5_block2_concat[0][0]
__________________________________________________________________________________________________
conv5_block3_0_relu (Activation (None, 7, 7, 576) 0 conv5_block3_0_bn[0][0]
__________________________________________________________________________________________________
conv5_block3_1_conv (Conv2D) (None, 7, 7, 128) 73728 conv5_block3_0_relu[0][0]
__________________________________________________________________________________________________
conv5_block3_1_bn (BatchNormali (None, 7, 7, 128) 512 conv5_block3_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block3_1_relu (Activation (None, 7, 7, 128) 0 conv5_block3_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block3_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block3_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block3_concat (Concatenat (None, 7, 7, 608) 0 conv5_block2_concat[0][0]
conv5_block3_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block4_0_bn (BatchNormali (None, 7, 7, 608) 2432 conv5_block3_concat[0][0]
__________________________________________________________________________________________________
conv5_block4_0_relu (Activation (None, 7, 7, 608) 0 conv5_block4_0_bn[0][0]
__________________________________________________________________________________________________
conv5_block4_1_conv (Conv2D) (None, 7, 7, 128) 77824 conv5_block4_0_relu[0][0]
__________________________________________________________________________________________________
conv5_block4_1_bn (BatchNormali (None, 7, 7, 128) 512 conv5_block4_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block4_1_relu (Activation (None, 7, 7, 128) 0 conv5_block4_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block4_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block4_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block4_concat (Concatenat (None, 7, 7, 640) 0 conv5_block3_concat[0][0]
conv5_block4_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block5_0_bn (BatchNormali (None, 7, 7, 640) 2560 conv5_block4_concat[0][0]
__________________________________________________________________________________________________
conv5_block5_0_relu (Activation (None, 7, 7, 640) 0 conv5_block5_0_bn[0][0]
__________________________________________________________________________________________________
conv5_block5_1_conv (Conv2D) (None, 7, 7, 128) 81920 conv5_block5_0_relu[0][0]
__________________________________________________________________________________________________
conv5_block5_1_bn (BatchNormali (None, 7, 7, 128) 512 conv5_block5_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block5_1_relu (Activation (None, 7, 7, 128) 0 conv5_block5_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block5_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block5_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block5_concat (Concatenat (None, 7, 7, 672) 0 conv5_block4_concat[0][0]
conv5_block5_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block6_0_bn (BatchNormali (None, 7, 7, 672) 2688 conv5_block5_concat[0][0]
__________________________________________________________________________________________________
conv5_block6_0_relu (Activation (None, 7, 7, 672) 0 conv5_block6_0_bn[0][0]
__________________________________________________________________________________________________
conv5_block6_1_conv (Conv2D) (None, 7, 7, 128) 86016 conv5_block6_0_relu[0][0]
__________________________________________________________________________________________________
conv5_block6_1_bn (BatchNormali (None, 7, 7, 128) 512 conv5_block6_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block6_1_relu (Activation (None, 7, 7, 128) 0 conv5_block6_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block6_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block6_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block6_concat (Concatenat (None, 7, 7, 704) 0 conv5_block5_concat[0][0]
conv5_block6_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block7_0_bn (BatchNormali (None, 7, 7, 704) 2816 conv5_block6_concat[0][0]
__________________________________________________________________________________________________
conv5_block7_0_relu (Activation (None, 7, 7, 704) 0 conv5_block7_0_bn[0][0]
__________________________________________________________________________________________________
conv5_block7_1_conv (Conv2D) (None, 7, 7, 128) 90112 conv5_block7_0_relu[0][0]
__________________________________________________________________________________________________
conv5_block7_1_bn (BatchNormali (None, 7, 7, 128) 512 conv5_block7_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block7_1_relu (Activation (None, 7, 7, 128) 0 conv5_block7_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block7_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block7_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block7_concat (Concatenat (None, 7, 7, 736) 0 conv5_block6_concat[0][0]
conv5_block7_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block8_0_bn (BatchNormali (None, 7, 7, 736) 2944 conv5_block7_concat[0][0]
__________________________________________________________________________________________________
conv5_block8_0_relu (Activation (None, 7, 7, 736) 0 conv5_block8_0_bn[0][0]
__________________________________________________________________________________________________
conv5_block8_1_conv (Conv2D) (None, 7, 7, 128) 94208 conv5_block8_0_relu[0][0]
__________________________________________________________________________________________________
conv5_block8_1_bn (BatchNormali (None, 7, 7, 128) 512 conv5_block8_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block8_1_relu (Activation (None, 7, 7, 128) 0 conv5_block8_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block8_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block8_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block8_concat (Concatenat (None, 7, 7, 768) 0 conv5_block7_concat[0][0]
conv5_block8_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block9_0_bn (BatchNormali (None, 7, 7, 768) 3072 conv5_block8_concat[0][0]
__________________________________________________________________________________________________
conv5_block9_0_relu (Activation (None, 7, 7, 768) 0 conv5_block9_0_bn[0][0]
__________________________________________________________________________________________________
conv5_block9_1_conv (Conv2D) (None, 7, 7, 128) 98304 conv5_block9_0_relu[0][0]
__________________________________________________________________________________________________
conv5_block9_1_bn (BatchNormali (None, 7, 7, 128) 512 conv5_block9_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block9_1_relu (Activation (None, 7, 7, 128) 0 conv5_block9_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block9_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block9_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block9_concat (Concatenat (None, 7, 7, 800) 0 conv5_block8_concat[0][0]
conv5_block9_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block10_0_bn (BatchNormal (None, 7, 7, 800) 3200 conv5_block9_concat[0][0]
__________________________________________________________________________________________________
conv5_block10_0_relu (Activatio (None, 7, 7, 800) 0 conv5_block10_0_bn[0][0]
__________________________________________________________________________________________________
conv5_block10_1_conv (Conv2D) (None, 7, 7, 128) 102400 conv5_block10_0_relu[0][0]
__________________________________________________________________________________________________
conv5_block10_1_bn (BatchNormal (None, 7, 7, 128) 512 conv5_block10_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block10_1_relu (Activatio (None, 7, 7, 128) 0 conv5_block10_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block10_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block10_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block10_concat (Concatena (None, 7, 7, 832) 0 conv5_block9_concat[0][0]
conv5_block10_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block11_0_bn (BatchNormal (None, 7, 7, 832) 3328 conv5_block10_concat[0][0]
__________________________________________________________________________________________________
conv5_block11_0_relu (Activatio (None, 7, 7, 832) 0 conv5_block11_0_bn[0][0]
__________________________________________________________________________________________________
conv5_block11_1_conv (Conv2D) (None, 7, 7, 128) 106496 conv5_block11_0_relu[0][0]
__________________________________________________________________________________________________
conv5_block11_1_bn (BatchNormal (None, 7, 7, 128) 512 conv5_block11_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block11_1_relu (Activatio (None, 7, 7, 128) 0 conv5_block11_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block11_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block11_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block11_concat (Concatena (None, 7, 7, 864) 0 conv5_block10_concat[0][0]
conv5_block11_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block12_0_bn (BatchNormal (None, 7, 7, 864) 3456 conv5_block11_concat[0][0]
__________________________________________________________________________________________________
conv5_block12_0_relu (Activatio (None, 7, 7, 864) 0 conv5_block12_0_bn[0][0]
__________________________________________________________________________________________________
conv5_block12_1_conv (Conv2D) (None, 7, 7, 128) 110592 conv5_block12_0_relu[0][0]
__________________________________________________________________________________________________
conv5_block12_1_bn (BatchNormal (None, 7, 7, 128) 512 conv5_block12_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block12_1_relu (Activatio (None, 7, 7, 128) 0 conv5_block12_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block12_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block12_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block12_concat (Concatena (None, 7, 7, 896) 0 conv5_block11_concat[0][0]
conv5_block12_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block13_0_bn (BatchNormal (None, 7, 7, 896) 3584 conv5_block12_concat[0][0]
__________________________________________________________________________________________________
conv5_block13_0_relu (Activatio (None, 7, 7, 896) 0 conv5_block13_0_bn[0][0]
__________________________________________________________________________________________________
conv5_block13_1_conv (Conv2D) (None, 7, 7, 128) 114688 conv5_block13_0_relu[0][0]
__________________________________________________________________________________________________
conv5_block13_1_bn (BatchNormal (None, 7, 7, 128) 512 conv5_block13_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block13_1_relu (Activatio (None, 7, 7, 128) 0 conv5_block13_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block13_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block13_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block13_concat (Concatena (None, 7, 7, 928) 0 conv5_block12_concat[0][0]
conv5_block13_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block14_0_bn (BatchNormal (None, 7, 7, 928) 3712 conv5_block13_concat[0][0]
__________________________________________________________________________________________________
conv5_block14_0_relu (Activatio (None, 7, 7, 928) 0 conv5_block14_0_bn[0][0]
__________________________________________________________________________________________________
conv5_block14_1_conv (Conv2D) (None, 7, 7, 128) 118784 conv5_block14_0_relu[0][0]
__________________________________________________________________________________________________
conv5_block14_1_bn (BatchNormal (None, 7, 7, 128) 512 conv5_block14_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block14_1_relu (Activatio (None, 7, 7, 128) 0 conv5_block14_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block14_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block14_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block14_concat (Concatena (None, 7, 7, 960) 0 conv5_block13_concat[0][0]
conv5_block14_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block15_0_bn (BatchNormal (None, 7, 7, 960) 3840 conv5_block14_concat[0][0]
__________________________________________________________________________________________________
conv5_block15_0_relu (Activatio (None, 7, 7, 960) 0 conv5_block15_0_bn[0][0]
__________________________________________________________________________________________________
conv5_block15_1_conv (Conv2D) (None, 7, 7, 128) 122880 conv5_block15_0_relu[0][0]
__________________________________________________________________________________________________
conv5_block15_1_bn (BatchNormal (None, 7, 7, 128) 512 conv5_block15_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block15_1_relu (Activatio (None, 7, 7, 128) 0 conv5_block15_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block15_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block15_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block15_concat (Concatena (None, 7, 7, 992) 0 conv5_block14_concat[0][0]
conv5_block15_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block16_0_bn (BatchNormal (None, 7, 7, 992) 3968 conv5_block15_concat[0][0]
__________________________________________________________________________________________________
conv5_block16_0_relu (Activatio (None, 7, 7, 992) 0 conv5_block16_0_bn[0][0]
__________________________________________________________________________________________________
conv5_block16_1_conv (Conv2D) (None, 7, 7, 128) 126976 conv5_block16_0_relu[0][0]
__________________________________________________________________________________________________
conv5_block16_1_bn (BatchNormal (None, 7, 7, 128) 512 conv5_block16_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block16_1_relu (Activatio (None, 7, 7, 128) 0 conv5_block16_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block16_2_conv (Conv2D) (None, 7, 7, 32) 36864 conv5_block16_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block16_concat (Concatena (None, 7, 7, 1024) 0 conv5_block15_concat[0][0]
conv5_block16_2_conv[0][0]
__________________________________________________________________________________________________
bn (BatchNormalization) (None, 7, 7, 1024) 4096 conv5_block16_concat[0][0]
__________________________________________________________________________________________________
relu (Activation) (None, 7, 7, 1024) 0 bn[0][0]
__________________________________________________________________________________________________
flatten (Flatten) (None, 50176) 0 relu[0][0]
__________________________________________________________________________________________________
dropout_9 (Dropout) (None, 50176) 0 flatten[0][0]
__________________________________________________________________________________________________
dense_9 (Dense) (None, 5) 250885 dropout_9[0][0]
==================================================================================================
Total params: 7,288,389
Trainable params: 7,204,741
Non-trainable params: 83,648
__________________________________________________________________________________________________
history= model_densenet.fit_generator(generator=train_generator,
steps_per_epoch=1000,
validation_data=valid_generator,
validation_steps=1000,
epochs=10)
Epoch 1/10 102/102 [==============================] - 11s 104ms/step - loss: 0.1465 - accuracy: 0.9560 - val_loss: 0.0499 - val_accuracy: 0.9902 Epoch 2/10 102/102 [==============================] - 10s 97ms/step - loss: 0.0491 - accuracy: 0.9877 - val_loss: 0.0422 - val_accuracy: 0.9867 Epoch 3/10 102/102 [==============================] - 10s 97ms/step - loss: 0.0303 - accuracy: 0.9908 - val_loss: 0.0324 - val_accuracy: 0.9920 Epoch 4/10 102/102 [==============================] - 9s 93ms/step - loss: 0.0285 - accuracy: 0.9917 - val_loss: 0.0286 - val_accuracy: 0.9902 Epoch 5/10 102/102 [==============================] - 12s 116ms/step - loss: 0.0158 - accuracy: 0.9948 - val_loss: 0.0237 - val_accuracy: 0.9956 Epoch 6/10 102/102 [==============================] - 11s 103ms/step - loss: 0.0142 - accuracy: 0.9953 - val_loss: 0.0301 - val_accuracy: 0.9929 Epoch 7/10 102/102 [==============================] - 11s 105ms/step - loss: 0.0109 - accuracy: 0.9969 - val_loss: 0.0243 - val_accuracy: 0.9911 Epoch 8/10 102/102 [==============================] - 10s 103ms/step - loss: 0.0074 - accuracy: 0.9980 - val_loss: 0.0216 - val_accuracy: 0.9947 Epoch 9/10 102/102 [==============================] - 11s 108ms/step - loss: 0.0100 - accuracy: 0.9969 - val_loss: 0.0237 - val_accuracy: 0.9911 Epoch 10/10 102/102 [==============================] - 12s 121ms/step - loss: 0.0065 - accuracy: 0.9976 - val_loss: 0.0110 - val_accuracy: 0.9982
accr = model_densenet.evaluate(X_test,y_test)
151/151 [==============================] - 2s 12ms/step - loss: 0.0209 - accuracy: 0.9936
y_pred = model_densenet.predict(X_test)
y_pred=y_pred.argmax(axis=1)
acc2=accr[1]
print('Test set\n Accuracy: {:0.5f}'.format(accr[1]))
Test set Accuracy: 0.99358
print('\n')
print("Precision, Recall, F1")
print('\n')
labels =['No DR', 'Moderate', 'Mild', 'Proliferative DR', 'Severe']
CR=classification_report(y_test, y_pred, target_names=labels)
print(CR)
print('\n')
Precision, Recall, F1
precision recall f1-score support
No DR 1.00 0.98 0.99 297
Moderate 1.00 0.99 0.99 694
Mild 0.99 1.00 0.99 1664
Proliferative DR 0.99 0.99 0.99 960
Severe 1.00 0.99 0.99 1215
accuracy 0.99 4830
macro avg 0.99 0.99 0.99 4830
weighted avg 0.99 0.99 0.99 4830
CM=confusion_matrix(y_test, y_pred)
fig, ax = plot_confusion_matrix(conf_mat=CM,figsize=(10, 10),
show_absolute=True,
show_normed=True,
colorbar=False)
ax.set_xticklabels([''] + labels)
ax.set_yticklabels([''] + labels)
plt.show()
exception_model = Xception(weights="imagenet", include_top=False,
input_tensor=Input(shape=(224, 224, 3)))
outputs = exception_model.output
outputs = Flatten(name="flatten")(outputs)
outputs = Dropout(0.5)(outputs)
outputs = Dense(5, activation="softmax")(outputs)
model_exception = Model(inputs=exception_model.input, outputs=outputs)
for layer in resnet.layers:
layer.trainable = False
model_exception.compile(
loss='categorical_crossentropy',
optimizer='adam',
metrics=['accuracy']
)
model_exception.summary()
Model: "functional_21"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_11 (InputLayer) [(None, 224, 224, 3) 0
__________________________________________________________________________________________________
block1_conv1 (Conv2D) (None, 111, 111, 32) 864 input_11[0][0]
__________________________________________________________________________________________________
block1_conv1_bn (BatchNormaliza (None, 111, 111, 32) 128 block1_conv1[0][0]
__________________________________________________________________________________________________
block1_conv1_act (Activation) (None, 111, 111, 32) 0 block1_conv1_bn[0][0]
__________________________________________________________________________________________________
block1_conv2 (Conv2D) (None, 109, 109, 64) 18432 block1_conv1_act[0][0]
__________________________________________________________________________________________________
block1_conv2_bn (BatchNormaliza (None, 109, 109, 64) 256 block1_conv2[0][0]
__________________________________________________________________________________________________
block1_conv2_act (Activation) (None, 109, 109, 64) 0 block1_conv2_bn[0][0]
__________________________________________________________________________________________________
block2_sepconv1 (SeparableConv2 (None, 109, 109, 128 8768 block1_conv2_act[0][0]
__________________________________________________________________________________________________
block2_sepconv1_bn (BatchNormal (None, 109, 109, 128 512 block2_sepconv1[0][0]
__________________________________________________________________________________________________
block2_sepconv2_act (Activation (None, 109, 109, 128 0 block2_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block2_sepconv2 (SeparableConv2 (None, 109, 109, 128 17536 block2_sepconv2_act[0][0]
__________________________________________________________________________________________________
block2_sepconv2_bn (BatchNormal (None, 109, 109, 128 512 block2_sepconv2[0][0]
__________________________________________________________________________________________________
conv2d_211 (Conv2D) (None, 55, 55, 128) 8192 block1_conv2_act[0][0]
__________________________________________________________________________________________________
block2_pool (MaxPooling2D) (None, 55, 55, 128) 0 block2_sepconv2_bn[0][0]
__________________________________________________________________________________________________
batch_normalization_211 (BatchN (None, 55, 55, 128) 512 conv2d_211[0][0]
__________________________________________________________________________________________________
add_24 (Add) (None, 55, 55, 128) 0 block2_pool[0][0]
batch_normalization_211[0][0]
__________________________________________________________________________________________________
block3_sepconv1_act (Activation (None, 55, 55, 128) 0 add_24[0][0]
__________________________________________________________________________________________________
block3_sepconv1 (SeparableConv2 (None, 55, 55, 256) 33920 block3_sepconv1_act[0][0]
__________________________________________________________________________________________________
block3_sepconv1_bn (BatchNormal (None, 55, 55, 256) 1024 block3_sepconv1[0][0]
__________________________________________________________________________________________________
block3_sepconv2_act (Activation (None, 55, 55, 256) 0 block3_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block3_sepconv2 (SeparableConv2 (None, 55, 55, 256) 67840 block3_sepconv2_act[0][0]
__________________________________________________________________________________________________
block3_sepconv2_bn (BatchNormal (None, 55, 55, 256) 1024 block3_sepconv2[0][0]
__________________________________________________________________________________________________
conv2d_212 (Conv2D) (None, 28, 28, 256) 32768 add_24[0][0]
__________________________________________________________________________________________________
block3_pool (MaxPooling2D) (None, 28, 28, 256) 0 block3_sepconv2_bn[0][0]
__________________________________________________________________________________________________
batch_normalization_212 (BatchN (None, 28, 28, 256) 1024 conv2d_212[0][0]
__________________________________________________________________________________________________
add_25 (Add) (None, 28, 28, 256) 0 block3_pool[0][0]
batch_normalization_212[0][0]
__________________________________________________________________________________________________
block4_sepconv1_act (Activation (None, 28, 28, 256) 0 add_25[0][0]
__________________________________________________________________________________________________
block4_sepconv1 (SeparableConv2 (None, 28, 28, 728) 188672 block4_sepconv1_act[0][0]
__________________________________________________________________________________________________
block4_sepconv1_bn (BatchNormal (None, 28, 28, 728) 2912 block4_sepconv1[0][0]
__________________________________________________________________________________________________
block4_sepconv2_act (Activation (None, 28, 28, 728) 0 block4_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block4_sepconv2 (SeparableConv2 (None, 28, 28, 728) 536536 block4_sepconv2_act[0][0]
__________________________________________________________________________________________________
block4_sepconv2_bn (BatchNormal (None, 28, 28, 728) 2912 block4_sepconv2[0][0]
__________________________________________________________________________________________________
conv2d_213 (Conv2D) (None, 14, 14, 728) 186368 add_25[0][0]
__________________________________________________________________________________________________
block4_pool (MaxPooling2D) (None, 14, 14, 728) 0 block4_sepconv2_bn[0][0]
__________________________________________________________________________________________________
batch_normalization_213 (BatchN (None, 14, 14, 728) 2912 conv2d_213[0][0]
__________________________________________________________________________________________________
add_26 (Add) (None, 14, 14, 728) 0 block4_pool[0][0]
batch_normalization_213[0][0]
__________________________________________________________________________________________________
block5_sepconv1_act (Activation (None, 14, 14, 728) 0 add_26[0][0]
__________________________________________________________________________________________________
block5_sepconv1 (SeparableConv2 (None, 14, 14, 728) 536536 block5_sepconv1_act[0][0]
__________________________________________________________________________________________________
block5_sepconv1_bn (BatchNormal (None, 14, 14, 728) 2912 block5_sepconv1[0][0]
__________________________________________________________________________________________________
block5_sepconv2_act (Activation (None, 14, 14, 728) 0 block5_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block5_sepconv2 (SeparableConv2 (None, 14, 14, 728) 536536 block5_sepconv2_act[0][0]
__________________________________________________________________________________________________
block5_sepconv2_bn (BatchNormal (None, 14, 14, 728) 2912 block5_sepconv2[0][0]
__________________________________________________________________________________________________
block5_sepconv3_act (Activation (None, 14, 14, 728) 0 block5_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block5_sepconv3 (SeparableConv2 (None, 14, 14, 728) 536536 block5_sepconv3_act[0][0]
__________________________________________________________________________________________________
block5_sepconv3_bn (BatchNormal (None, 14, 14, 728) 2912 block5_sepconv3[0][0]
__________________________________________________________________________________________________
add_27 (Add) (None, 14, 14, 728) 0 block5_sepconv3_bn[0][0]
add_26[0][0]
__________________________________________________________________________________________________
block6_sepconv1_act (Activation (None, 14, 14, 728) 0 add_27[0][0]
__________________________________________________________________________________________________
block6_sepconv1 (SeparableConv2 (None, 14, 14, 728) 536536 block6_sepconv1_act[0][0]
__________________________________________________________________________________________________
block6_sepconv1_bn (BatchNormal (None, 14, 14, 728) 2912 block6_sepconv1[0][0]
__________________________________________________________________________________________________
block6_sepconv2_act (Activation (None, 14, 14, 728) 0 block6_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block6_sepconv2 (SeparableConv2 (None, 14, 14, 728) 536536 block6_sepconv2_act[0][0]
__________________________________________________________________________________________________
block6_sepconv2_bn (BatchNormal (None, 14, 14, 728) 2912 block6_sepconv2[0][0]
__________________________________________________________________________________________________
block6_sepconv3_act (Activation (None, 14, 14, 728) 0 block6_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block6_sepconv3 (SeparableConv2 (None, 14, 14, 728) 536536 block6_sepconv3_act[0][0]
__________________________________________________________________________________________________
block6_sepconv3_bn (BatchNormal (None, 14, 14, 728) 2912 block6_sepconv3[0][0]
__________________________________________________________________________________________________
add_28 (Add) (None, 14, 14, 728) 0 block6_sepconv3_bn[0][0]
add_27[0][0]
__________________________________________________________________________________________________
block7_sepconv1_act (Activation (None, 14, 14, 728) 0 add_28[0][0]
__________________________________________________________________________________________________
block7_sepconv1 (SeparableConv2 (None, 14, 14, 728) 536536 block7_sepconv1_act[0][0]
__________________________________________________________________________________________________
block7_sepconv1_bn (BatchNormal (None, 14, 14, 728) 2912 block7_sepconv1[0][0]
__________________________________________________________________________________________________
block7_sepconv2_act (Activation (None, 14, 14, 728) 0 block7_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block7_sepconv2 (SeparableConv2 (None, 14, 14, 728) 536536 block7_sepconv2_act[0][0]
__________________________________________________________________________________________________
block7_sepconv2_bn (BatchNormal (None, 14, 14, 728) 2912 block7_sepconv2[0][0]
__________________________________________________________________________________________________
block7_sepconv3_act (Activation (None, 14, 14, 728) 0 block7_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block7_sepconv3 (SeparableConv2 (None, 14, 14, 728) 536536 block7_sepconv3_act[0][0]
__________________________________________________________________________________________________
block7_sepconv3_bn (BatchNormal (None, 14, 14, 728) 2912 block7_sepconv3[0][0]
__________________________________________________________________________________________________
add_29 (Add) (None, 14, 14, 728) 0 block7_sepconv3_bn[0][0]
add_28[0][0]
__________________________________________________________________________________________________
block8_sepconv1_act (Activation (None, 14, 14, 728) 0 add_29[0][0]
__________________________________________________________________________________________________
block8_sepconv1 (SeparableConv2 (None, 14, 14, 728) 536536 block8_sepconv1_act[0][0]
__________________________________________________________________________________________________
block8_sepconv1_bn (BatchNormal (None, 14, 14, 728) 2912 block8_sepconv1[0][0]
__________________________________________________________________________________________________
block8_sepconv2_act (Activation (None, 14, 14, 728) 0 block8_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block8_sepconv2 (SeparableConv2 (None, 14, 14, 728) 536536 block8_sepconv2_act[0][0]
__________________________________________________________________________________________________
block8_sepconv2_bn (BatchNormal (None, 14, 14, 728) 2912 block8_sepconv2[0][0]
__________________________________________________________________________________________________
block8_sepconv3_act (Activation (None, 14, 14, 728) 0 block8_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block8_sepconv3 (SeparableConv2 (None, 14, 14, 728) 536536 block8_sepconv3_act[0][0]
__________________________________________________________________________________________________
block8_sepconv3_bn (BatchNormal (None, 14, 14, 728) 2912 block8_sepconv3[0][0]
__________________________________________________________________________________________________
add_30 (Add) (None, 14, 14, 728) 0 block8_sepconv3_bn[0][0]
add_29[0][0]
__________________________________________________________________________________________________
block9_sepconv1_act (Activation (None, 14, 14, 728) 0 add_30[0][0]
__________________________________________________________________________________________________
block9_sepconv1 (SeparableConv2 (None, 14, 14, 728) 536536 block9_sepconv1_act[0][0]
__________________________________________________________________________________________________
block9_sepconv1_bn (BatchNormal (None, 14, 14, 728) 2912 block9_sepconv1[0][0]
__________________________________________________________________________________________________
block9_sepconv2_act (Activation (None, 14, 14, 728) 0 block9_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block9_sepconv2 (SeparableConv2 (None, 14, 14, 728) 536536 block9_sepconv2_act[0][0]
__________________________________________________________________________________________________
block9_sepconv2_bn (BatchNormal (None, 14, 14, 728) 2912 block9_sepconv2[0][0]
__________________________________________________________________________________________________
block9_sepconv3_act (Activation (None, 14, 14, 728) 0 block9_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block9_sepconv3 (SeparableConv2 (None, 14, 14, 728) 536536 block9_sepconv3_act[0][0]
__________________________________________________________________________________________________
block9_sepconv3_bn (BatchNormal (None, 14, 14, 728) 2912 block9_sepconv3[0][0]
__________________________________________________________________________________________________
add_31 (Add) (None, 14, 14, 728) 0 block9_sepconv3_bn[0][0]
add_30[0][0]
__________________________________________________________________________________________________
block10_sepconv1_act (Activatio (None, 14, 14, 728) 0 add_31[0][0]
__________________________________________________________________________________________________
block10_sepconv1 (SeparableConv (None, 14, 14, 728) 536536 block10_sepconv1_act[0][0]
__________________________________________________________________________________________________
block10_sepconv1_bn (BatchNorma (None, 14, 14, 728) 2912 block10_sepconv1[0][0]
__________________________________________________________________________________________________
block10_sepconv2_act (Activatio (None, 14, 14, 728) 0 block10_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block10_sepconv2 (SeparableConv (None, 14, 14, 728) 536536 block10_sepconv2_act[0][0]
__________________________________________________________________________________________________
block10_sepconv2_bn (BatchNorma (None, 14, 14, 728) 2912 block10_sepconv2[0][0]
__________________________________________________________________________________________________
block10_sepconv3_act (Activatio (None, 14, 14, 728) 0 block10_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block10_sepconv3 (SeparableConv (None, 14, 14, 728) 536536 block10_sepconv3_act[0][0]
__________________________________________________________________________________________________
block10_sepconv3_bn (BatchNorma (None, 14, 14, 728) 2912 block10_sepconv3[0][0]
__________________________________________________________________________________________________
add_32 (Add) (None, 14, 14, 728) 0 block10_sepconv3_bn[0][0]
add_31[0][0]
__________________________________________________________________________________________________
block11_sepconv1_act (Activatio (None, 14, 14, 728) 0 add_32[0][0]
__________________________________________________________________________________________________
block11_sepconv1 (SeparableConv (None, 14, 14, 728) 536536 block11_sepconv1_act[0][0]
__________________________________________________________________________________________________
block11_sepconv1_bn (BatchNorma (None, 14, 14, 728) 2912 block11_sepconv1[0][0]
__________________________________________________________________________________________________
block11_sepconv2_act (Activatio (None, 14, 14, 728) 0 block11_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block11_sepconv2 (SeparableConv (None, 14, 14, 728) 536536 block11_sepconv2_act[0][0]
__________________________________________________________________________________________________
block11_sepconv2_bn (BatchNorma (None, 14, 14, 728) 2912 block11_sepconv2[0][0]
__________________________________________________________________________________________________
block11_sepconv3_act (Activatio (None, 14, 14, 728) 0 block11_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block11_sepconv3 (SeparableConv (None, 14, 14, 728) 536536 block11_sepconv3_act[0][0]
__________________________________________________________________________________________________
block11_sepconv3_bn (BatchNorma (None, 14, 14, 728) 2912 block11_sepconv3[0][0]
__________________________________________________________________________________________________
add_33 (Add) (None, 14, 14, 728) 0 block11_sepconv3_bn[0][0]
add_32[0][0]
__________________________________________________________________________________________________
block12_sepconv1_act (Activatio (None, 14, 14, 728) 0 add_33[0][0]
__________________________________________________________________________________________________
block12_sepconv1 (SeparableConv (None, 14, 14, 728) 536536 block12_sepconv1_act[0][0]
__________________________________________________________________________________________________
block12_sepconv1_bn (BatchNorma (None, 14, 14, 728) 2912 block12_sepconv1[0][0]
__________________________________________________________________________________________________
block12_sepconv2_act (Activatio (None, 14, 14, 728) 0 block12_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block12_sepconv2 (SeparableConv (None, 14, 14, 728) 536536 block12_sepconv2_act[0][0]
__________________________________________________________________________________________________
block12_sepconv2_bn (BatchNorma (None, 14, 14, 728) 2912 block12_sepconv2[0][0]
__________________________________________________________________________________________________
block12_sepconv3_act (Activatio (None, 14, 14, 728) 0 block12_sepconv2_bn[0][0]
__________________________________________________________________________________________________
block12_sepconv3 (SeparableConv (None, 14, 14, 728) 536536 block12_sepconv3_act[0][0]
__________________________________________________________________________________________________
block12_sepconv3_bn (BatchNorma (None, 14, 14, 728) 2912 block12_sepconv3[0][0]
__________________________________________________________________________________________________
add_34 (Add) (None, 14, 14, 728) 0 block12_sepconv3_bn[0][0]
add_33[0][0]
__________________________________________________________________________________________________
block13_sepconv1_act (Activatio (None, 14, 14, 728) 0 add_34[0][0]
__________________________________________________________________________________________________
block13_sepconv1 (SeparableConv (None, 14, 14, 728) 536536 block13_sepconv1_act[0][0]
__________________________________________________________________________________________________
block13_sepconv1_bn (BatchNorma (None, 14, 14, 728) 2912 block13_sepconv1[0][0]
__________________________________________________________________________________________________
block13_sepconv2_act (Activatio (None, 14, 14, 728) 0 block13_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block13_sepconv2 (SeparableConv (None, 14, 14, 1024) 752024 block13_sepconv2_act[0][0]
__________________________________________________________________________________________________
block13_sepconv2_bn (BatchNorma (None, 14, 14, 1024) 4096 block13_sepconv2[0][0]
__________________________________________________________________________________________________
conv2d_214 (Conv2D) (None, 7, 7, 1024) 745472 add_34[0][0]
__________________________________________________________________________________________________
block13_pool (MaxPooling2D) (None, 7, 7, 1024) 0 block13_sepconv2_bn[0][0]
__________________________________________________________________________________________________
batch_normalization_214 (BatchN (None, 7, 7, 1024) 4096 conv2d_214[0][0]
__________________________________________________________________________________________________
add_35 (Add) (None, 7, 7, 1024) 0 block13_pool[0][0]
batch_normalization_214[0][0]
__________________________________________________________________________________________________
block14_sepconv1 (SeparableConv (None, 7, 7, 1536) 1582080 add_35[0][0]
__________________________________________________________________________________________________
block14_sepconv1_bn (BatchNorma (None, 7, 7, 1536) 6144 block14_sepconv1[0][0]
__________________________________________________________________________________________________
block14_sepconv1_act (Activatio (None, 7, 7, 1536) 0 block14_sepconv1_bn[0][0]
__________________________________________________________________________________________________
block14_sepconv2 (SeparableConv (None, 7, 7, 2048) 3159552 block14_sepconv1_act[0][0]
__________________________________________________________________________________________________
block14_sepconv2_bn (BatchNorma (None, 7, 7, 2048) 8192 block14_sepconv2[0][0]
__________________________________________________________________________________________________
block14_sepconv2_act (Activatio (None, 7, 7, 2048) 0 block14_sepconv2_bn[0][0]
__________________________________________________________________________________________________
flatten (Flatten) (None, 100352) 0 block14_sepconv2_act[0][0]
__________________________________________________________________________________________________
dropout_10 (Dropout) (None, 100352) 0 flatten[0][0]
__________________________________________________________________________________________________
dense_10 (Dense) (None, 5) 501765 dropout_10[0][0]
==================================================================================================
Total params: 21,363,245
Trainable params: 21,308,717
Non-trainable params: 54,528
__________________________________________________________________________________________________
history= model_exception.fit_generator(generator=train_generator,
steps_per_epoch=1000,
validation_data=valid_generator,
validation_steps=1000,
epochs=10)
Epoch 1/10 102/102 [==============================] - 8s 82ms/step - loss: 0.4837 - accuracy: 0.8429 - val_loss: 0.0748 - val_accuracy: 0.9787 Epoch 2/10 102/102 [==============================] - 9s 84ms/step - loss: 0.0579 - accuracy: 0.9845 - val_loss: 0.0358 - val_accuracy: 0.9876 Epoch 3/10 102/102 [==============================] - 9s 84ms/step - loss: 0.0337 - accuracy: 0.9908 - val_loss: 0.0377 - val_accuracy: 0.9929 Epoch 4/10 102/102 [==============================] - 9s 87ms/step - loss: 0.0202 - accuracy: 0.9943 - val_loss: 0.0420 - val_accuracy: 0.9894 Epoch 5/10 102/102 [==============================] - 10s 96ms/step - loss: 0.0301 - accuracy: 0.9925 - val_loss: 0.0444 - val_accuracy: 0.9849 Epoch 6/10 102/102 [==============================] - 9s 84ms/step - loss: 0.0159 - accuracy: 0.9960 - val_loss: 0.0134 - val_accuracy: 0.9982 Epoch 7/10 102/102 [==============================] - 10s 95ms/step - loss: 0.0073 - accuracy: 0.9976 - val_loss: 0.0139 - val_accuracy: 0.9956 Epoch 8/10 102/102 [==============================] - 8s 83ms/step - loss: 0.0285 - accuracy: 0.9920 - val_loss: 0.0315 - val_accuracy: 0.9902 Epoch 9/10 102/102 [==============================] - 8s 82ms/step - loss: 0.0120 - accuracy: 0.9965 - val_loss: 0.0229 - val_accuracy: 0.9965 Epoch 10/10 102/102 [==============================] - 8s 83ms/step - loss: 0.0083 - accuracy: 0.9979 - val_loss: 0.0140 - val_accuracy: 0.9965
accr = model_exception.evaluate(X_test,y_test)
151/151 [==============================] - 3s 18ms/step - loss: 0.0194 - accuracy: 0.9963
y_pred = model_exception.predict(X_test)
y_pred=y_pred.argmax(axis=1)
acc3=accr[1]
print('Test set\n Accuracy: {:0.5f}'.format(accr[1]))
Test set Accuracy: 0.99627
print('\n')
print("Precision, Recall, F1")
print('\n')
labels =['No DR', 'Moderate', 'Mild', 'Proliferative DR', 'Severe']
CR=classification_report(y_test, y_pred, target_names=labels)
print(CR)
print('\n')
Precision, Recall, F1
precision recall f1-score support
No DR 1.00 0.99 0.99 297
Moderate 0.99 1.00 0.99 694
Mild 1.00 0.99 1.00 1664
Proliferative DR 1.00 1.00 1.00 960
Severe 0.99 1.00 1.00 1215
accuracy 1.00 4830
macro avg 1.00 1.00 1.00 4830
weighted avg 1.00 1.00 1.00 4830
CM=confusion_matrix(y_test, y_pred)
fig, ax = plot_confusion_matrix(conf_mat=CM,figsize=(10, 10),
show_absolute=True,
show_normed=True,
colorbar=False)
ax.set_xticklabels([''] + labels)
ax.set_yticklabels([''] + labels)
plt.show()
inceptionRe = InceptionResNetV2(weights="imagenet", include_top=False,
input_tensor=Input(shape=(224, 224, 3)))
outputs = inceptionRe.output
outputs = Flatten(name="flatten")(outputs)
outputs = Dropout(0.5)(outputs)
outputs = Dense(5, activation="softmax")(outputs)
model_inceptionNet = Model(inputs=inceptionRe.input, outputs=outputs)
for layer in resnet.layers:
layer.trainable = False
model_inceptionNet.compile(
loss='categorical_crossentropy',
optimizer='adam',
metrics=['accuracy']
)
model_inceptionNet.summary()
Model: "functional_23"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_12 (InputLayer) [(None, 224, 224, 3) 0
__________________________________________________________________________________________________
conv2d_215 (Conv2D) (None, 111, 111, 32) 864 input_12[0][0]
__________________________________________________________________________________________________
batch_normalization_215 (BatchN (None, 111, 111, 32) 96 conv2d_215[0][0]
__________________________________________________________________________________________________
activation_203 (Activation) (None, 111, 111, 32) 0 batch_normalization_215[0][0]
__________________________________________________________________________________________________
conv2d_216 (Conv2D) (None, 109, 109, 32) 9216 activation_203[0][0]
__________________________________________________________________________________________________
batch_normalization_216 (BatchN (None, 109, 109, 32) 96 conv2d_216[0][0]
__________________________________________________________________________________________________
activation_204 (Activation) (None, 109, 109, 32) 0 batch_normalization_216[0][0]
__________________________________________________________________________________________________
conv2d_217 (Conv2D) (None, 109, 109, 64) 18432 activation_204[0][0]
__________________________________________________________________________________________________
batch_normalization_217 (BatchN (None, 109, 109, 64) 192 conv2d_217[0][0]
__________________________________________________________________________________________________
activation_205 (Activation) (None, 109, 109, 64) 0 batch_normalization_217[0][0]
__________________________________________________________________________________________________
max_pooling2d_4 (MaxPooling2D) (None, 54, 54, 64) 0 activation_205[0][0]
__________________________________________________________________________________________________
conv2d_218 (Conv2D) (None, 54, 54, 80) 5120 max_pooling2d_4[0][0]
__________________________________________________________________________________________________
batch_normalization_218 (BatchN (None, 54, 54, 80) 240 conv2d_218[0][0]
__________________________________________________________________________________________________
activation_206 (Activation) (None, 54, 54, 80) 0 batch_normalization_218[0][0]
__________________________________________________________________________________________________
conv2d_219 (Conv2D) (None, 52, 52, 192) 138240 activation_206[0][0]
__________________________________________________________________________________________________
batch_normalization_219 (BatchN (None, 52, 52, 192) 576 conv2d_219[0][0]
__________________________________________________________________________________________________
activation_207 (Activation) (None, 52, 52, 192) 0 batch_normalization_219[0][0]
__________________________________________________________________________________________________
max_pooling2d_5 (MaxPooling2D) (None, 25, 25, 192) 0 activation_207[0][0]
__________________________________________________________________________________________________
conv2d_223 (Conv2D) (None, 25, 25, 64) 12288 max_pooling2d_5[0][0]
__________________________________________________________________________________________________
batch_normalization_223 (BatchN (None, 25, 25, 64) 192 conv2d_223[0][0]
__________________________________________________________________________________________________
activation_211 (Activation) (None, 25, 25, 64) 0 batch_normalization_223[0][0]
__________________________________________________________________________________________________
conv2d_221 (Conv2D) (None, 25, 25, 48) 9216 max_pooling2d_5[0][0]
__________________________________________________________________________________________________
conv2d_224 (Conv2D) (None, 25, 25, 96) 55296 activation_211[0][0]
__________________________________________________________________________________________________
batch_normalization_221 (BatchN (None, 25, 25, 48) 144 conv2d_221[0][0]
__________________________________________________________________________________________________
batch_normalization_224 (BatchN (None, 25, 25, 96) 288 conv2d_224[0][0]
__________________________________________________________________________________________________
activation_209 (Activation) (None, 25, 25, 48) 0 batch_normalization_221[0][0]
__________________________________________________________________________________________________
activation_212 (Activation) (None, 25, 25, 96) 0 batch_normalization_224[0][0]
__________________________________________________________________________________________________
average_pooling2d_1 (AveragePoo (None, 25, 25, 192) 0 max_pooling2d_5[0][0]
__________________________________________________________________________________________________
conv2d_220 (Conv2D) (None, 25, 25, 96) 18432 max_pooling2d_5[0][0]
__________________________________________________________________________________________________
conv2d_222 (Conv2D) (None, 25, 25, 64) 76800 activation_209[0][0]
__________________________________________________________________________________________________
conv2d_225 (Conv2D) (None, 25, 25, 96) 82944 activation_212[0][0]
__________________________________________________________________________________________________
conv2d_226 (Conv2D) (None, 25, 25, 64) 12288 average_pooling2d_1[0][0]
__________________________________________________________________________________________________
batch_normalization_220 (BatchN (None, 25, 25, 96) 288 conv2d_220[0][0]
__________________________________________________________________________________________________
batch_normalization_222 (BatchN (None, 25, 25, 64) 192 conv2d_222[0][0]
__________________________________________________________________________________________________
batch_normalization_225 (BatchN (None, 25, 25, 96) 288 conv2d_225[0][0]
__________________________________________________________________________________________________
batch_normalization_226 (BatchN (None, 25, 25, 64) 192 conv2d_226[0][0]
__________________________________________________________________________________________________
activation_208 (Activation) (None, 25, 25, 96) 0 batch_normalization_220[0][0]
__________________________________________________________________________________________________
activation_210 (Activation) (None, 25, 25, 64) 0 batch_normalization_222[0][0]
__________________________________________________________________________________________________
activation_213 (Activation) (None, 25, 25, 96) 0 batch_normalization_225[0][0]
__________________________________________________________________________________________________
activation_214 (Activation) (None, 25, 25, 64) 0 batch_normalization_226[0][0]
__________________________________________________________________________________________________
mixed_5b (Concatenate) (None, 25, 25, 320) 0 activation_208[0][0]
activation_210[0][0]
activation_213[0][0]
activation_214[0][0]
__________________________________________________________________________________________________
conv2d_230 (Conv2D) (None, 25, 25, 32) 10240 mixed_5b[0][0]
__________________________________________________________________________________________________
batch_normalization_230 (BatchN (None, 25, 25, 32) 96 conv2d_230[0][0]
__________________________________________________________________________________________________
activation_218 (Activation) (None, 25, 25, 32) 0 batch_normalization_230[0][0]
__________________________________________________________________________________________________
conv2d_228 (Conv2D) (None, 25, 25, 32) 10240 mixed_5b[0][0]
__________________________________________________________________________________________________
conv2d_231 (Conv2D) (None, 25, 25, 48) 13824 activation_218[0][0]
__________________________________________________________________________________________________
batch_normalization_228 (BatchN (None, 25, 25, 32) 96 conv2d_228[0][0]
__________________________________________________________________________________________________
batch_normalization_231 (BatchN (None, 25, 25, 48) 144 conv2d_231[0][0]
__________________________________________________________________________________________________
activation_216 (Activation) (None, 25, 25, 32) 0 batch_normalization_228[0][0]
__________________________________________________________________________________________________
activation_219 (Activation) (None, 25, 25, 48) 0 batch_normalization_231[0][0]
__________________________________________________________________________________________________
conv2d_227 (Conv2D) (None, 25, 25, 32) 10240 mixed_5b[0][0]
__________________________________________________________________________________________________
conv2d_229 (Conv2D) (None, 25, 25, 32) 9216 activation_216[0][0]
__________________________________________________________________________________________________
conv2d_232 (Conv2D) (None, 25, 25, 64) 27648 activation_219[0][0]
__________________________________________________________________________________________________
batch_normalization_227 (BatchN (None, 25, 25, 32) 96 conv2d_227[0][0]
__________________________________________________________________________________________________
batch_normalization_229 (BatchN (None, 25, 25, 32) 96 conv2d_229[0][0]
__________________________________________________________________________________________________
batch_normalization_232 (BatchN (None, 25, 25, 64) 192 conv2d_232[0][0]
__________________________________________________________________________________________________
activation_215 (Activation) (None, 25, 25, 32) 0 batch_normalization_227[0][0]
__________________________________________________________________________________________________
activation_217 (Activation) (None, 25, 25, 32) 0 batch_normalization_229[0][0]
__________________________________________________________________________________________________
activation_220 (Activation) (None, 25, 25, 64) 0 batch_normalization_232[0][0]
__________________________________________________________________________________________________
block35_1_mixed (Concatenate) (None, 25, 25, 128) 0 activation_215[0][0]
activation_217[0][0]
activation_220[0][0]
__________________________________________________________________________________________________
block35_1_conv (Conv2D) (None, 25, 25, 320) 41280 block35_1_mixed[0][0]
__________________________________________________________________________________________________
block35_1 (Lambda) (None, 25, 25, 320) 0 mixed_5b[0][0]
block35_1_conv[0][0]
__________________________________________________________________________________________________
block35_1_ac (Activation) (None, 25, 25, 320) 0 block35_1[0][0]
__________________________________________________________________________________________________
conv2d_236 (Conv2D) (None, 25, 25, 32) 10240 block35_1_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_236 (BatchN (None, 25, 25, 32) 96 conv2d_236[0][0]
__________________________________________________________________________________________________
activation_224 (Activation) (None, 25, 25, 32) 0 batch_normalization_236[0][0]
__________________________________________________________________________________________________
conv2d_234 (Conv2D) (None, 25, 25, 32) 10240 block35_1_ac[0][0]
__________________________________________________________________________________________________
conv2d_237 (Conv2D) (None, 25, 25, 48) 13824 activation_224[0][0]
__________________________________________________________________________________________________
batch_normalization_234 (BatchN (None, 25, 25, 32) 96 conv2d_234[0][0]
__________________________________________________________________________________________________
batch_normalization_237 (BatchN (None, 25, 25, 48) 144 conv2d_237[0][0]
__________________________________________________________________________________________________
activation_222 (Activation) (None, 25, 25, 32) 0 batch_normalization_234[0][0]
__________________________________________________________________________________________________
activation_225 (Activation) (None, 25, 25, 48) 0 batch_normalization_237[0][0]
__________________________________________________________________________________________________
conv2d_233 (Conv2D) (None, 25, 25, 32) 10240 block35_1_ac[0][0]
__________________________________________________________________________________________________
conv2d_235 (Conv2D) (None, 25, 25, 32) 9216 activation_222[0][0]
__________________________________________________________________________________________________
conv2d_238 (Conv2D) (None, 25, 25, 64) 27648 activation_225[0][0]
__________________________________________________________________________________________________
batch_normalization_233 (BatchN (None, 25, 25, 32) 96 conv2d_233[0][0]
__________________________________________________________________________________________________
batch_normalization_235 (BatchN (None, 25, 25, 32) 96 conv2d_235[0][0]
__________________________________________________________________________________________________
batch_normalization_238 (BatchN (None, 25, 25, 64) 192 conv2d_238[0][0]
__________________________________________________________________________________________________
activation_221 (Activation) (None, 25, 25, 32) 0 batch_normalization_233[0][0]
__________________________________________________________________________________________________
activation_223 (Activation) (None, 25, 25, 32) 0 batch_normalization_235[0][0]
__________________________________________________________________________________________________
activation_226 (Activation) (None, 25, 25, 64) 0 batch_normalization_238[0][0]
__________________________________________________________________________________________________
block35_2_mixed (Concatenate) (None, 25, 25, 128) 0 activation_221[0][0]
activation_223[0][0]
activation_226[0][0]
__________________________________________________________________________________________________
block35_2_conv (Conv2D) (None, 25, 25, 320) 41280 block35_2_mixed[0][0]
__________________________________________________________________________________________________
block35_2 (Lambda) (None, 25, 25, 320) 0 block35_1_ac[0][0]
block35_2_conv[0][0]
__________________________________________________________________________________________________
block35_2_ac (Activation) (None, 25, 25, 320) 0 block35_2[0][0]
__________________________________________________________________________________________________
conv2d_242 (Conv2D) (None, 25, 25, 32) 10240 block35_2_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_242 (BatchN (None, 25, 25, 32) 96 conv2d_242[0][0]
__________________________________________________________________________________________________
activation_230 (Activation) (None, 25, 25, 32) 0 batch_normalization_242[0][0]
__________________________________________________________________________________________________
conv2d_240 (Conv2D) (None, 25, 25, 32) 10240 block35_2_ac[0][0]
__________________________________________________________________________________________________
conv2d_243 (Conv2D) (None, 25, 25, 48) 13824 activation_230[0][0]
__________________________________________________________________________________________________
batch_normalization_240 (BatchN (None, 25, 25, 32) 96 conv2d_240[0][0]
__________________________________________________________________________________________________
batch_normalization_243 (BatchN (None, 25, 25, 48) 144 conv2d_243[0][0]
__________________________________________________________________________________________________
activation_228 (Activation) (None, 25, 25, 32) 0 batch_normalization_240[0][0]
__________________________________________________________________________________________________
activation_231 (Activation) (None, 25, 25, 48) 0 batch_normalization_243[0][0]
__________________________________________________________________________________________________
conv2d_239 (Conv2D) (None, 25, 25, 32) 10240 block35_2_ac[0][0]
__________________________________________________________________________________________________
conv2d_241 (Conv2D) (None, 25, 25, 32) 9216 activation_228[0][0]
__________________________________________________________________________________________________
conv2d_244 (Conv2D) (None, 25, 25, 64) 27648 activation_231[0][0]
__________________________________________________________________________________________________
batch_normalization_239 (BatchN (None, 25, 25, 32) 96 conv2d_239[0][0]
__________________________________________________________________________________________________
batch_normalization_241 (BatchN (None, 25, 25, 32) 96 conv2d_241[0][0]
__________________________________________________________________________________________________
batch_normalization_244 (BatchN (None, 25, 25, 64) 192 conv2d_244[0][0]
__________________________________________________________________________________________________
activation_227 (Activation) (None, 25, 25, 32) 0 batch_normalization_239[0][0]
__________________________________________________________________________________________________
activation_229 (Activation) (None, 25, 25, 32) 0 batch_normalization_241[0][0]
__________________________________________________________________________________________________
activation_232 (Activation) (None, 25, 25, 64) 0 batch_normalization_244[0][0]
__________________________________________________________________________________________________
block35_3_mixed (Concatenate) (None, 25, 25, 128) 0 activation_227[0][0]
activation_229[0][0]
activation_232[0][0]
__________________________________________________________________________________________________
block35_3_conv (Conv2D) (None, 25, 25, 320) 41280 block35_3_mixed[0][0]
__________________________________________________________________________________________________
block35_3 (Lambda) (None, 25, 25, 320) 0 block35_2_ac[0][0]
block35_3_conv[0][0]
__________________________________________________________________________________________________
block35_3_ac (Activation) (None, 25, 25, 320) 0 block35_3[0][0]
__________________________________________________________________________________________________
conv2d_248 (Conv2D) (None, 25, 25, 32) 10240 block35_3_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_248 (BatchN (None, 25, 25, 32) 96 conv2d_248[0][0]
__________________________________________________________________________________________________
activation_236 (Activation) (None, 25, 25, 32) 0 batch_normalization_248[0][0]
__________________________________________________________________________________________________
conv2d_246 (Conv2D) (None, 25, 25, 32) 10240 block35_3_ac[0][0]
__________________________________________________________________________________________________
conv2d_249 (Conv2D) (None, 25, 25, 48) 13824 activation_236[0][0]
__________________________________________________________________________________________________
batch_normalization_246 (BatchN (None, 25, 25, 32) 96 conv2d_246[0][0]
__________________________________________________________________________________________________
batch_normalization_249 (BatchN (None, 25, 25, 48) 144 conv2d_249[0][0]
__________________________________________________________________________________________________
activation_234 (Activation) (None, 25, 25, 32) 0 batch_normalization_246[0][0]
__________________________________________________________________________________________________
activation_237 (Activation) (None, 25, 25, 48) 0 batch_normalization_249[0][0]
__________________________________________________________________________________________________
conv2d_245 (Conv2D) (None, 25, 25, 32) 10240 block35_3_ac[0][0]
__________________________________________________________________________________________________
conv2d_247 (Conv2D) (None, 25, 25, 32) 9216 activation_234[0][0]
__________________________________________________________________________________________________
conv2d_250 (Conv2D) (None, 25, 25, 64) 27648 activation_237[0][0]
__________________________________________________________________________________________________
batch_normalization_245 (BatchN (None, 25, 25, 32) 96 conv2d_245[0][0]
__________________________________________________________________________________________________
batch_normalization_247 (BatchN (None, 25, 25, 32) 96 conv2d_247[0][0]
__________________________________________________________________________________________________
batch_normalization_250 (BatchN (None, 25, 25, 64) 192 conv2d_250[0][0]
__________________________________________________________________________________________________
activation_233 (Activation) (None, 25, 25, 32) 0 batch_normalization_245[0][0]
__________________________________________________________________________________________________
activation_235 (Activation) (None, 25, 25, 32) 0 batch_normalization_247[0][0]
__________________________________________________________________________________________________
activation_238 (Activation) (None, 25, 25, 64) 0 batch_normalization_250[0][0]
__________________________________________________________________________________________________
block35_4_mixed (Concatenate) (None, 25, 25, 128) 0 activation_233[0][0]
activation_235[0][0]
activation_238[0][0]
__________________________________________________________________________________________________
block35_4_conv (Conv2D) (None, 25, 25, 320) 41280 block35_4_mixed[0][0]
__________________________________________________________________________________________________
block35_4 (Lambda) (None, 25, 25, 320) 0 block35_3_ac[0][0]
block35_4_conv[0][0]
__________________________________________________________________________________________________
block35_4_ac (Activation) (None, 25, 25, 320) 0 block35_4[0][0]
__________________________________________________________________________________________________
conv2d_254 (Conv2D) (None, 25, 25, 32) 10240 block35_4_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_254 (BatchN (None, 25, 25, 32) 96 conv2d_254[0][0]
__________________________________________________________________________________________________
activation_242 (Activation) (None, 25, 25, 32) 0 batch_normalization_254[0][0]
__________________________________________________________________________________________________
conv2d_252 (Conv2D) (None, 25, 25, 32) 10240 block35_4_ac[0][0]
__________________________________________________________________________________________________
conv2d_255 (Conv2D) (None, 25, 25, 48) 13824 activation_242[0][0]
__________________________________________________________________________________________________
batch_normalization_252 (BatchN (None, 25, 25, 32) 96 conv2d_252[0][0]
__________________________________________________________________________________________________
batch_normalization_255 (BatchN (None, 25, 25, 48) 144 conv2d_255[0][0]
__________________________________________________________________________________________________
activation_240 (Activation) (None, 25, 25, 32) 0 batch_normalization_252[0][0]
__________________________________________________________________________________________________
activation_243 (Activation) (None, 25, 25, 48) 0 batch_normalization_255[0][0]
__________________________________________________________________________________________________
conv2d_251 (Conv2D) (None, 25, 25, 32) 10240 block35_4_ac[0][0]
__________________________________________________________________________________________________
conv2d_253 (Conv2D) (None, 25, 25, 32) 9216 activation_240[0][0]
__________________________________________________________________________________________________
conv2d_256 (Conv2D) (None, 25, 25, 64) 27648 activation_243[0][0]
__________________________________________________________________________________________________
batch_normalization_251 (BatchN (None, 25, 25, 32) 96 conv2d_251[0][0]
__________________________________________________________________________________________________
batch_normalization_253 (BatchN (None, 25, 25, 32) 96 conv2d_253[0][0]
__________________________________________________________________________________________________
batch_normalization_256 (BatchN (None, 25, 25, 64) 192 conv2d_256[0][0]
__________________________________________________________________________________________________
activation_239 (Activation) (None, 25, 25, 32) 0 batch_normalization_251[0][0]
__________________________________________________________________________________________________
activation_241 (Activation) (None, 25, 25, 32) 0 batch_normalization_253[0][0]
__________________________________________________________________________________________________
activation_244 (Activation) (None, 25, 25, 64) 0 batch_normalization_256[0][0]
__________________________________________________________________________________________________
block35_5_mixed (Concatenate) (None, 25, 25, 128) 0 activation_239[0][0]
activation_241[0][0]
activation_244[0][0]
__________________________________________________________________________________________________
block35_5_conv (Conv2D) (None, 25, 25, 320) 41280 block35_5_mixed[0][0]
__________________________________________________________________________________________________
block35_5 (Lambda) (None, 25, 25, 320) 0 block35_4_ac[0][0]
block35_5_conv[0][0]
__________________________________________________________________________________________________
block35_5_ac (Activation) (None, 25, 25, 320) 0 block35_5[0][0]
__________________________________________________________________________________________________
conv2d_260 (Conv2D) (None, 25, 25, 32) 10240 block35_5_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_260 (BatchN (None, 25, 25, 32) 96 conv2d_260[0][0]
__________________________________________________________________________________________________
activation_248 (Activation) (None, 25, 25, 32) 0 batch_normalization_260[0][0]
__________________________________________________________________________________________________
conv2d_258 (Conv2D) (None, 25, 25, 32) 10240 block35_5_ac[0][0]
__________________________________________________________________________________________________
conv2d_261 (Conv2D) (None, 25, 25, 48) 13824 activation_248[0][0]
__________________________________________________________________________________________________
batch_normalization_258 (BatchN (None, 25, 25, 32) 96 conv2d_258[0][0]
__________________________________________________________________________________________________
batch_normalization_261 (BatchN (None, 25, 25, 48) 144 conv2d_261[0][0]
__________________________________________________________________________________________________
activation_246 (Activation) (None, 25, 25, 32) 0 batch_normalization_258[0][0]
__________________________________________________________________________________________________
activation_249 (Activation) (None, 25, 25, 48) 0 batch_normalization_261[0][0]
__________________________________________________________________________________________________
conv2d_257 (Conv2D) (None, 25, 25, 32) 10240 block35_5_ac[0][0]
__________________________________________________________________________________________________
conv2d_259 (Conv2D) (None, 25, 25, 32) 9216 activation_246[0][0]
__________________________________________________________________________________________________
conv2d_262 (Conv2D) (None, 25, 25, 64) 27648 activation_249[0][0]
__________________________________________________________________________________________________
batch_normalization_257 (BatchN (None, 25, 25, 32) 96 conv2d_257[0][0]
__________________________________________________________________________________________________
batch_normalization_259 (BatchN (None, 25, 25, 32) 96 conv2d_259[0][0]
__________________________________________________________________________________________________
batch_normalization_262 (BatchN (None, 25, 25, 64) 192 conv2d_262[0][0]
__________________________________________________________________________________________________
activation_245 (Activation) (None, 25, 25, 32) 0 batch_normalization_257[0][0]
__________________________________________________________________________________________________
activation_247 (Activation) (None, 25, 25, 32) 0 batch_normalization_259[0][0]
__________________________________________________________________________________________________
activation_250 (Activation) (None, 25, 25, 64) 0 batch_normalization_262[0][0]
__________________________________________________________________________________________________
block35_6_mixed (Concatenate) (None, 25, 25, 128) 0 activation_245[0][0]
activation_247[0][0]
activation_250[0][0]
__________________________________________________________________________________________________
block35_6_conv (Conv2D) (None, 25, 25, 320) 41280 block35_6_mixed[0][0]
__________________________________________________________________________________________________
block35_6 (Lambda) (None, 25, 25, 320) 0 block35_5_ac[0][0]
block35_6_conv[0][0]
__________________________________________________________________________________________________
block35_6_ac (Activation) (None, 25, 25, 320) 0 block35_6[0][0]
__________________________________________________________________________________________________
conv2d_266 (Conv2D) (None, 25, 25, 32) 10240 block35_6_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_266 (BatchN (None, 25, 25, 32) 96 conv2d_266[0][0]
__________________________________________________________________________________________________
activation_254 (Activation) (None, 25, 25, 32) 0 batch_normalization_266[0][0]
__________________________________________________________________________________________________
conv2d_264 (Conv2D) (None, 25, 25, 32) 10240 block35_6_ac[0][0]
__________________________________________________________________________________________________
conv2d_267 (Conv2D) (None, 25, 25, 48) 13824 activation_254[0][0]
__________________________________________________________________________________________________
batch_normalization_264 (BatchN (None, 25, 25, 32) 96 conv2d_264[0][0]
__________________________________________________________________________________________________
batch_normalization_267 (BatchN (None, 25, 25, 48) 144 conv2d_267[0][0]
__________________________________________________________________________________________________
activation_252 (Activation) (None, 25, 25, 32) 0 batch_normalization_264[0][0]
__________________________________________________________________________________________________
activation_255 (Activation) (None, 25, 25, 48) 0 batch_normalization_267[0][0]
__________________________________________________________________________________________________
conv2d_263 (Conv2D) (None, 25, 25, 32) 10240 block35_6_ac[0][0]
__________________________________________________________________________________________________
conv2d_265 (Conv2D) (None, 25, 25, 32) 9216 activation_252[0][0]
__________________________________________________________________________________________________
conv2d_268 (Conv2D) (None, 25, 25, 64) 27648 activation_255[0][0]
__________________________________________________________________________________________________
batch_normalization_263 (BatchN (None, 25, 25, 32) 96 conv2d_263[0][0]
__________________________________________________________________________________________________
batch_normalization_265 (BatchN (None, 25, 25, 32) 96 conv2d_265[0][0]
__________________________________________________________________________________________________
batch_normalization_268 (BatchN (None, 25, 25, 64) 192 conv2d_268[0][0]
__________________________________________________________________________________________________
activation_251 (Activation) (None, 25, 25, 32) 0 batch_normalization_263[0][0]
__________________________________________________________________________________________________
activation_253 (Activation) (None, 25, 25, 32) 0 batch_normalization_265[0][0]
__________________________________________________________________________________________________
activation_256 (Activation) (None, 25, 25, 64) 0 batch_normalization_268[0][0]
__________________________________________________________________________________________________
block35_7_mixed (Concatenate) (None, 25, 25, 128) 0 activation_251[0][0]
activation_253[0][0]
activation_256[0][0]
__________________________________________________________________________________________________
block35_7_conv (Conv2D) (None, 25, 25, 320) 41280 block35_7_mixed[0][0]
__________________________________________________________________________________________________
block35_7 (Lambda) (None, 25, 25, 320) 0 block35_6_ac[0][0]
block35_7_conv[0][0]
__________________________________________________________________________________________________
block35_7_ac (Activation) (None, 25, 25, 320) 0 block35_7[0][0]
__________________________________________________________________________________________________
conv2d_272 (Conv2D) (None, 25, 25, 32) 10240 block35_7_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_272 (BatchN (None, 25, 25, 32) 96 conv2d_272[0][0]
__________________________________________________________________________________________________
activation_260 (Activation) (None, 25, 25, 32) 0 batch_normalization_272[0][0]
__________________________________________________________________________________________________
conv2d_270 (Conv2D) (None, 25, 25, 32) 10240 block35_7_ac[0][0]
__________________________________________________________________________________________________
conv2d_273 (Conv2D) (None, 25, 25, 48) 13824 activation_260[0][0]
__________________________________________________________________________________________________
batch_normalization_270 (BatchN (None, 25, 25, 32) 96 conv2d_270[0][0]
__________________________________________________________________________________________________
batch_normalization_273 (BatchN (None, 25, 25, 48) 144 conv2d_273[0][0]
__________________________________________________________________________________________________
activation_258 (Activation) (None, 25, 25, 32) 0 batch_normalization_270[0][0]
__________________________________________________________________________________________________
activation_261 (Activation) (None, 25, 25, 48) 0 batch_normalization_273[0][0]
__________________________________________________________________________________________________
conv2d_269 (Conv2D) (None, 25, 25, 32) 10240 block35_7_ac[0][0]
__________________________________________________________________________________________________
conv2d_271 (Conv2D) (None, 25, 25, 32) 9216 activation_258[0][0]
__________________________________________________________________________________________________
conv2d_274 (Conv2D) (None, 25, 25, 64) 27648 activation_261[0][0]
__________________________________________________________________________________________________
batch_normalization_269 (BatchN (None, 25, 25, 32) 96 conv2d_269[0][0]
__________________________________________________________________________________________________
batch_normalization_271 (BatchN (None, 25, 25, 32) 96 conv2d_271[0][0]
__________________________________________________________________________________________________
batch_normalization_274 (BatchN (None, 25, 25, 64) 192 conv2d_274[0][0]
__________________________________________________________________________________________________
activation_257 (Activation) (None, 25, 25, 32) 0 batch_normalization_269[0][0]
__________________________________________________________________________________________________
activation_259 (Activation) (None, 25, 25, 32) 0 batch_normalization_271[0][0]
__________________________________________________________________________________________________
activation_262 (Activation) (None, 25, 25, 64) 0 batch_normalization_274[0][0]
__________________________________________________________________________________________________
block35_8_mixed (Concatenate) (None, 25, 25, 128) 0 activation_257[0][0]
activation_259[0][0]
activation_262[0][0]
__________________________________________________________________________________________________
block35_8_conv (Conv2D) (None, 25, 25, 320) 41280 block35_8_mixed[0][0]
__________________________________________________________________________________________________
block35_8 (Lambda) (None, 25, 25, 320) 0 block35_7_ac[0][0]
block35_8_conv[0][0]
__________________________________________________________________________________________________
block35_8_ac (Activation) (None, 25, 25, 320) 0 block35_8[0][0]
__________________________________________________________________________________________________
conv2d_278 (Conv2D) (None, 25, 25, 32) 10240 block35_8_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_278 (BatchN (None, 25, 25, 32) 96 conv2d_278[0][0]
__________________________________________________________________________________________________
activation_266 (Activation) (None, 25, 25, 32) 0 batch_normalization_278[0][0]
__________________________________________________________________________________________________
conv2d_276 (Conv2D) (None, 25, 25, 32) 10240 block35_8_ac[0][0]
__________________________________________________________________________________________________
conv2d_279 (Conv2D) (None, 25, 25, 48) 13824 activation_266[0][0]
__________________________________________________________________________________________________
batch_normalization_276 (BatchN (None, 25, 25, 32) 96 conv2d_276[0][0]
__________________________________________________________________________________________________
batch_normalization_279 (BatchN (None, 25, 25, 48) 144 conv2d_279[0][0]
__________________________________________________________________________________________________
activation_264 (Activation) (None, 25, 25, 32) 0 batch_normalization_276[0][0]
__________________________________________________________________________________________________
activation_267 (Activation) (None, 25, 25, 48) 0 batch_normalization_279[0][0]
__________________________________________________________________________________________________
conv2d_275 (Conv2D) (None, 25, 25, 32) 10240 block35_8_ac[0][0]
__________________________________________________________________________________________________
conv2d_277 (Conv2D) (None, 25, 25, 32) 9216 activation_264[0][0]
__________________________________________________________________________________________________
conv2d_280 (Conv2D) (None, 25, 25, 64) 27648 activation_267[0][0]
__________________________________________________________________________________________________
batch_normalization_275 (BatchN (None, 25, 25, 32) 96 conv2d_275[0][0]
__________________________________________________________________________________________________
batch_normalization_277 (BatchN (None, 25, 25, 32) 96 conv2d_277[0][0]
__________________________________________________________________________________________________
batch_normalization_280 (BatchN (None, 25, 25, 64) 192 conv2d_280[0][0]
__________________________________________________________________________________________________
activation_263 (Activation) (None, 25, 25, 32) 0 batch_normalization_275[0][0]
__________________________________________________________________________________________________
activation_265 (Activation) (None, 25, 25, 32) 0 batch_normalization_277[0][0]
__________________________________________________________________________________________________
activation_268 (Activation) (None, 25, 25, 64) 0 batch_normalization_280[0][0]
__________________________________________________________________________________________________
block35_9_mixed (Concatenate) (None, 25, 25, 128) 0 activation_263[0][0]
activation_265[0][0]
activation_268[0][0]
__________________________________________________________________________________________________
block35_9_conv (Conv2D) (None, 25, 25, 320) 41280 block35_9_mixed[0][0]
__________________________________________________________________________________________________
block35_9 (Lambda) (None, 25, 25, 320) 0 block35_8_ac[0][0]
block35_9_conv[0][0]
__________________________________________________________________________________________________
block35_9_ac (Activation) (None, 25, 25, 320) 0 block35_9[0][0]
__________________________________________________________________________________________________
conv2d_284 (Conv2D) (None, 25, 25, 32) 10240 block35_9_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_284 (BatchN (None, 25, 25, 32) 96 conv2d_284[0][0]
__________________________________________________________________________________________________
activation_272 (Activation) (None, 25, 25, 32) 0 batch_normalization_284[0][0]
__________________________________________________________________________________________________
conv2d_282 (Conv2D) (None, 25, 25, 32) 10240 block35_9_ac[0][0]
__________________________________________________________________________________________________
conv2d_285 (Conv2D) (None, 25, 25, 48) 13824 activation_272[0][0]
__________________________________________________________________________________________________
batch_normalization_282 (BatchN (None, 25, 25, 32) 96 conv2d_282[0][0]
__________________________________________________________________________________________________
batch_normalization_285 (BatchN (None, 25, 25, 48) 144 conv2d_285[0][0]
__________________________________________________________________________________________________
activation_270 (Activation) (None, 25, 25, 32) 0 batch_normalization_282[0][0]
__________________________________________________________________________________________________
activation_273 (Activation) (None, 25, 25, 48) 0 batch_normalization_285[0][0]
__________________________________________________________________________________________________
conv2d_281 (Conv2D) (None, 25, 25, 32) 10240 block35_9_ac[0][0]
__________________________________________________________________________________________________
conv2d_283 (Conv2D) (None, 25, 25, 32) 9216 activation_270[0][0]
__________________________________________________________________________________________________
conv2d_286 (Conv2D) (None, 25, 25, 64) 27648 activation_273[0][0]
__________________________________________________________________________________________________
batch_normalization_281 (BatchN (None, 25, 25, 32) 96 conv2d_281[0][0]
__________________________________________________________________________________________________
batch_normalization_283 (BatchN (None, 25, 25, 32) 96 conv2d_283[0][0]
__________________________________________________________________________________________________
batch_normalization_286 (BatchN (None, 25, 25, 64) 192 conv2d_286[0][0]
__________________________________________________________________________________________________
activation_269 (Activation) (None, 25, 25, 32) 0 batch_normalization_281[0][0]
__________________________________________________________________________________________________
activation_271 (Activation) (None, 25, 25, 32) 0 batch_normalization_283[0][0]
__________________________________________________________________________________________________
activation_274 (Activation) (None, 25, 25, 64) 0 batch_normalization_286[0][0]
__________________________________________________________________________________________________
block35_10_mixed (Concatenate) (None, 25, 25, 128) 0 activation_269[0][0]
activation_271[0][0]
activation_274[0][0]
__________________________________________________________________________________________________
block35_10_conv (Conv2D) (None, 25, 25, 320) 41280 block35_10_mixed[0][0]
__________________________________________________________________________________________________
block35_10 (Lambda) (None, 25, 25, 320) 0 block35_9_ac[0][0]
block35_10_conv[0][0]
__________________________________________________________________________________________________
block35_10_ac (Activation) (None, 25, 25, 320) 0 block35_10[0][0]
__________________________________________________________________________________________________
conv2d_288 (Conv2D) (None, 25, 25, 256) 81920 block35_10_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_288 (BatchN (None, 25, 25, 256) 768 conv2d_288[0][0]
__________________________________________________________________________________________________
activation_276 (Activation) (None, 25, 25, 256) 0 batch_normalization_288[0][0]
__________________________________________________________________________________________________
conv2d_289 (Conv2D) (None, 25, 25, 256) 589824 activation_276[0][0]
__________________________________________________________________________________________________
batch_normalization_289 (BatchN (None, 25, 25, 256) 768 conv2d_289[0][0]
__________________________________________________________________________________________________
activation_277 (Activation) (None, 25, 25, 256) 0 batch_normalization_289[0][0]
__________________________________________________________________________________________________
conv2d_287 (Conv2D) (None, 12, 12, 384) 1105920 block35_10_ac[0][0]
__________________________________________________________________________________________________
conv2d_290 (Conv2D) (None, 12, 12, 384) 884736 activation_277[0][0]
__________________________________________________________________________________________________
batch_normalization_287 (BatchN (None, 12, 12, 384) 1152 conv2d_287[0][0]
__________________________________________________________________________________________________
batch_normalization_290 (BatchN (None, 12, 12, 384) 1152 conv2d_290[0][0]
__________________________________________________________________________________________________
activation_275 (Activation) (None, 12, 12, 384) 0 batch_normalization_287[0][0]
__________________________________________________________________________________________________
activation_278 (Activation) (None, 12, 12, 384) 0 batch_normalization_290[0][0]
__________________________________________________________________________________________________
max_pooling2d_6 (MaxPooling2D) (None, 12, 12, 320) 0 block35_10_ac[0][0]
__________________________________________________________________________________________________
mixed_6a (Concatenate) (None, 12, 12, 1088) 0 activation_275[0][0]
activation_278[0][0]
max_pooling2d_6[0][0]
__________________________________________________________________________________________________
conv2d_292 (Conv2D) (None, 12, 12, 128) 139264 mixed_6a[0][0]
__________________________________________________________________________________________________
batch_normalization_292 (BatchN (None, 12, 12, 128) 384 conv2d_292[0][0]
__________________________________________________________________________________________________
activation_280 (Activation) (None, 12, 12, 128) 0 batch_normalization_292[0][0]
__________________________________________________________________________________________________
conv2d_293 (Conv2D) (None, 12, 12, 160) 143360 activation_280[0][0]
__________________________________________________________________________________________________
batch_normalization_293 (BatchN (None, 12, 12, 160) 480 conv2d_293[0][0]
__________________________________________________________________________________________________
activation_281 (Activation) (None, 12, 12, 160) 0 batch_normalization_293[0][0]
__________________________________________________________________________________________________
conv2d_291 (Conv2D) (None, 12, 12, 192) 208896 mixed_6a[0][0]
__________________________________________________________________________________________________
conv2d_294 (Conv2D) (None, 12, 12, 192) 215040 activation_281[0][0]
__________________________________________________________________________________________________
batch_normalization_291 (BatchN (None, 12, 12, 192) 576 conv2d_291[0][0]
__________________________________________________________________________________________________
batch_normalization_294 (BatchN (None, 12, 12, 192) 576 conv2d_294[0][0]
__________________________________________________________________________________________________
activation_279 (Activation) (None, 12, 12, 192) 0 batch_normalization_291[0][0]
__________________________________________________________________________________________________
activation_282 (Activation) (None, 12, 12, 192) 0 batch_normalization_294[0][0]
__________________________________________________________________________________________________
block17_1_mixed (Concatenate) (None, 12, 12, 384) 0 activation_279[0][0]
activation_282[0][0]
__________________________________________________________________________________________________
block17_1_conv (Conv2D) (None, 12, 12, 1088) 418880 block17_1_mixed[0][0]
__________________________________________________________________________________________________
block17_1 (Lambda) (None, 12, 12, 1088) 0 mixed_6a[0][0]
block17_1_conv[0][0]
__________________________________________________________________________________________________
block17_1_ac (Activation) (None, 12, 12, 1088) 0 block17_1[0][0]
__________________________________________________________________________________________________
conv2d_296 (Conv2D) (None, 12, 12, 128) 139264 block17_1_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_296 (BatchN (None, 12, 12, 128) 384 conv2d_296[0][0]
__________________________________________________________________________________________________
activation_284 (Activation) (None, 12, 12, 128) 0 batch_normalization_296[0][0]
__________________________________________________________________________________________________
conv2d_297 (Conv2D) (None, 12, 12, 160) 143360 activation_284[0][0]
__________________________________________________________________________________________________
batch_normalization_297 (BatchN (None, 12, 12, 160) 480 conv2d_297[0][0]
__________________________________________________________________________________________________
activation_285 (Activation) (None, 12, 12, 160) 0 batch_normalization_297[0][0]
__________________________________________________________________________________________________
conv2d_295 (Conv2D) (None, 12, 12, 192) 208896 block17_1_ac[0][0]
__________________________________________________________________________________________________
conv2d_298 (Conv2D) (None, 12, 12, 192) 215040 activation_285[0][0]
__________________________________________________________________________________________________
batch_normalization_295 (BatchN (None, 12, 12, 192) 576 conv2d_295[0][0]
__________________________________________________________________________________________________
batch_normalization_298 (BatchN (None, 12, 12, 192) 576 conv2d_298[0][0]
__________________________________________________________________________________________________
activation_283 (Activation) (None, 12, 12, 192) 0 batch_normalization_295[0][0]
__________________________________________________________________________________________________
activation_286 (Activation) (None, 12, 12, 192) 0 batch_normalization_298[0][0]
__________________________________________________________________________________________________
block17_2_mixed (Concatenate) (None, 12, 12, 384) 0 activation_283[0][0]
activation_286[0][0]
__________________________________________________________________________________________________
block17_2_conv (Conv2D) (None, 12, 12, 1088) 418880 block17_2_mixed[0][0]
__________________________________________________________________________________________________
block17_2 (Lambda) (None, 12, 12, 1088) 0 block17_1_ac[0][0]
block17_2_conv[0][0]
__________________________________________________________________________________________________
block17_2_ac (Activation) (None, 12, 12, 1088) 0 block17_2[0][0]
__________________________________________________________________________________________________
conv2d_300 (Conv2D) (None, 12, 12, 128) 139264 block17_2_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_300 (BatchN (None, 12, 12, 128) 384 conv2d_300[0][0]
__________________________________________________________________________________________________
activation_288 (Activation) (None, 12, 12, 128) 0 batch_normalization_300[0][0]
__________________________________________________________________________________________________
conv2d_301 (Conv2D) (None, 12, 12, 160) 143360 activation_288[0][0]
__________________________________________________________________________________________________
batch_normalization_301 (BatchN (None, 12, 12, 160) 480 conv2d_301[0][0]
__________________________________________________________________________________________________
activation_289 (Activation) (None, 12, 12, 160) 0 batch_normalization_301[0][0]
__________________________________________________________________________________________________
conv2d_299 (Conv2D) (None, 12, 12, 192) 208896 block17_2_ac[0][0]
__________________________________________________________________________________________________
conv2d_302 (Conv2D) (None, 12, 12, 192) 215040 activation_289[0][0]
__________________________________________________________________________________________________
batch_normalization_299 (BatchN (None, 12, 12, 192) 576 conv2d_299[0][0]
__________________________________________________________________________________________________
batch_normalization_302 (BatchN (None, 12, 12, 192) 576 conv2d_302[0][0]
__________________________________________________________________________________________________
activation_287 (Activation) (None, 12, 12, 192) 0 batch_normalization_299[0][0]
__________________________________________________________________________________________________
activation_290 (Activation) (None, 12, 12, 192) 0 batch_normalization_302[0][0]
__________________________________________________________________________________________________
block17_3_mixed (Concatenate) (None, 12, 12, 384) 0 activation_287[0][0]
activation_290[0][0]
__________________________________________________________________________________________________
block17_3_conv (Conv2D) (None, 12, 12, 1088) 418880 block17_3_mixed[0][0]
__________________________________________________________________________________________________
block17_3 (Lambda) (None, 12, 12, 1088) 0 block17_2_ac[0][0]
block17_3_conv[0][0]
__________________________________________________________________________________________________
block17_3_ac (Activation) (None, 12, 12, 1088) 0 block17_3[0][0]
__________________________________________________________________________________________________
conv2d_304 (Conv2D) (None, 12, 12, 128) 139264 block17_3_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_304 (BatchN (None, 12, 12, 128) 384 conv2d_304[0][0]
__________________________________________________________________________________________________
activation_292 (Activation) (None, 12, 12, 128) 0 batch_normalization_304[0][0]
__________________________________________________________________________________________________
conv2d_305 (Conv2D) (None, 12, 12, 160) 143360 activation_292[0][0]
__________________________________________________________________________________________________
batch_normalization_305 (BatchN (None, 12, 12, 160) 480 conv2d_305[0][0]
__________________________________________________________________________________________________
activation_293 (Activation) (None, 12, 12, 160) 0 batch_normalization_305[0][0]
__________________________________________________________________________________________________
conv2d_303 (Conv2D) (None, 12, 12, 192) 208896 block17_3_ac[0][0]
__________________________________________________________________________________________________
conv2d_306 (Conv2D) (None, 12, 12, 192) 215040 activation_293[0][0]
__________________________________________________________________________________________________
batch_normalization_303 (BatchN (None, 12, 12, 192) 576 conv2d_303[0][0]
__________________________________________________________________________________________________
batch_normalization_306 (BatchN (None, 12, 12, 192) 576 conv2d_306[0][0]
__________________________________________________________________________________________________
activation_291 (Activation) (None, 12, 12, 192) 0 batch_normalization_303[0][0]
__________________________________________________________________________________________________
activation_294 (Activation) (None, 12, 12, 192) 0 batch_normalization_306[0][0]
__________________________________________________________________________________________________
block17_4_mixed (Concatenate) (None, 12, 12, 384) 0 activation_291[0][0]
activation_294[0][0]
__________________________________________________________________________________________________
block17_4_conv (Conv2D) (None, 12, 12, 1088) 418880 block17_4_mixed[0][0]
__________________________________________________________________________________________________
block17_4 (Lambda) (None, 12, 12, 1088) 0 block17_3_ac[0][0]
block17_4_conv[0][0]
__________________________________________________________________________________________________
block17_4_ac (Activation) (None, 12, 12, 1088) 0 block17_4[0][0]
__________________________________________________________________________________________________
conv2d_308 (Conv2D) (None, 12, 12, 128) 139264 block17_4_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_308 (BatchN (None, 12, 12, 128) 384 conv2d_308[0][0]
__________________________________________________________________________________________________
activation_296 (Activation) (None, 12, 12, 128) 0 batch_normalization_308[0][0]
__________________________________________________________________________________________________
conv2d_309 (Conv2D) (None, 12, 12, 160) 143360 activation_296[0][0]
__________________________________________________________________________________________________
batch_normalization_309 (BatchN (None, 12, 12, 160) 480 conv2d_309[0][0]
__________________________________________________________________________________________________
activation_297 (Activation) (None, 12, 12, 160) 0 batch_normalization_309[0][0]
__________________________________________________________________________________________________
conv2d_307 (Conv2D) (None, 12, 12, 192) 208896 block17_4_ac[0][0]
__________________________________________________________________________________________________
conv2d_310 (Conv2D) (None, 12, 12, 192) 215040 activation_297[0][0]
__________________________________________________________________________________________________
batch_normalization_307 (BatchN (None, 12, 12, 192) 576 conv2d_307[0][0]
__________________________________________________________________________________________________
batch_normalization_310 (BatchN (None, 12, 12, 192) 576 conv2d_310[0][0]
__________________________________________________________________________________________________
activation_295 (Activation) (None, 12, 12, 192) 0 batch_normalization_307[0][0]
__________________________________________________________________________________________________
activation_298 (Activation) (None, 12, 12, 192) 0 batch_normalization_310[0][0]
__________________________________________________________________________________________________
block17_5_mixed (Concatenate) (None, 12, 12, 384) 0 activation_295[0][0]
activation_298[0][0]
__________________________________________________________________________________________________
block17_5_conv (Conv2D) (None, 12, 12, 1088) 418880 block17_5_mixed[0][0]
__________________________________________________________________________________________________
block17_5 (Lambda) (None, 12, 12, 1088) 0 block17_4_ac[0][0]
block17_5_conv[0][0]
__________________________________________________________________________________________________
block17_5_ac (Activation) (None, 12, 12, 1088) 0 block17_5[0][0]
__________________________________________________________________________________________________
conv2d_312 (Conv2D) (None, 12, 12, 128) 139264 block17_5_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_312 (BatchN (None, 12, 12, 128) 384 conv2d_312[0][0]
__________________________________________________________________________________________________
activation_300 (Activation) (None, 12, 12, 128) 0 batch_normalization_312[0][0]
__________________________________________________________________________________________________
conv2d_313 (Conv2D) (None, 12, 12, 160) 143360 activation_300[0][0]
__________________________________________________________________________________________________
batch_normalization_313 (BatchN (None, 12, 12, 160) 480 conv2d_313[0][0]
__________________________________________________________________________________________________
activation_301 (Activation) (None, 12, 12, 160) 0 batch_normalization_313[0][0]
__________________________________________________________________________________________________
conv2d_311 (Conv2D) (None, 12, 12, 192) 208896 block17_5_ac[0][0]
__________________________________________________________________________________________________
conv2d_314 (Conv2D) (None, 12, 12, 192) 215040 activation_301[0][0]
__________________________________________________________________________________________________
batch_normalization_311 (BatchN (None, 12, 12, 192) 576 conv2d_311[0][0]
__________________________________________________________________________________________________
batch_normalization_314 (BatchN (None, 12, 12, 192) 576 conv2d_314[0][0]
__________________________________________________________________________________________________
activation_299 (Activation) (None, 12, 12, 192) 0 batch_normalization_311[0][0]
__________________________________________________________________________________________________
activation_302 (Activation) (None, 12, 12, 192) 0 batch_normalization_314[0][0]
__________________________________________________________________________________________________
block17_6_mixed (Concatenate) (None, 12, 12, 384) 0 activation_299[0][0]
activation_302[0][0]
__________________________________________________________________________________________________
block17_6_conv (Conv2D) (None, 12, 12, 1088) 418880 block17_6_mixed[0][0]
__________________________________________________________________________________________________
block17_6 (Lambda) (None, 12, 12, 1088) 0 block17_5_ac[0][0]
block17_6_conv[0][0]
__________________________________________________________________________________________________
block17_6_ac (Activation) (None, 12, 12, 1088) 0 block17_6[0][0]
__________________________________________________________________________________________________
conv2d_316 (Conv2D) (None, 12, 12, 128) 139264 block17_6_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_316 (BatchN (None, 12, 12, 128) 384 conv2d_316[0][0]
__________________________________________________________________________________________________
activation_304 (Activation) (None, 12, 12, 128) 0 batch_normalization_316[0][0]
__________________________________________________________________________________________________
conv2d_317 (Conv2D) (None, 12, 12, 160) 143360 activation_304[0][0]
__________________________________________________________________________________________________
batch_normalization_317 (BatchN (None, 12, 12, 160) 480 conv2d_317[0][0]
__________________________________________________________________________________________________
activation_305 (Activation) (None, 12, 12, 160) 0 batch_normalization_317[0][0]
__________________________________________________________________________________________________
conv2d_315 (Conv2D) (None, 12, 12, 192) 208896 block17_6_ac[0][0]
__________________________________________________________________________________________________
conv2d_318 (Conv2D) (None, 12, 12, 192) 215040 activation_305[0][0]
__________________________________________________________________________________________________
batch_normalization_315 (BatchN (None, 12, 12, 192) 576 conv2d_315[0][0]
__________________________________________________________________________________________________
batch_normalization_318 (BatchN (None, 12, 12, 192) 576 conv2d_318[0][0]
__________________________________________________________________________________________________
activation_303 (Activation) (None, 12, 12, 192) 0 batch_normalization_315[0][0]
__________________________________________________________________________________________________
activation_306 (Activation) (None, 12, 12, 192) 0 batch_normalization_318[0][0]
__________________________________________________________________________________________________
block17_7_mixed (Concatenate) (None, 12, 12, 384) 0 activation_303[0][0]
activation_306[0][0]
__________________________________________________________________________________________________
block17_7_conv (Conv2D) (None, 12, 12, 1088) 418880 block17_7_mixed[0][0]
__________________________________________________________________________________________________
block17_7 (Lambda) (None, 12, 12, 1088) 0 block17_6_ac[0][0]
block17_7_conv[0][0]
__________________________________________________________________________________________________
block17_7_ac (Activation) (None, 12, 12, 1088) 0 block17_7[0][0]
__________________________________________________________________________________________________
conv2d_320 (Conv2D) (None, 12, 12, 128) 139264 block17_7_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_320 (BatchN (None, 12, 12, 128) 384 conv2d_320[0][0]
__________________________________________________________________________________________________
activation_308 (Activation) (None, 12, 12, 128) 0 batch_normalization_320[0][0]
__________________________________________________________________________________________________
conv2d_321 (Conv2D) (None, 12, 12, 160) 143360 activation_308[0][0]
__________________________________________________________________________________________________
batch_normalization_321 (BatchN (None, 12, 12, 160) 480 conv2d_321[0][0]
__________________________________________________________________________________________________
activation_309 (Activation) (None, 12, 12, 160) 0 batch_normalization_321[0][0]
__________________________________________________________________________________________________
conv2d_319 (Conv2D) (None, 12, 12, 192) 208896 block17_7_ac[0][0]
__________________________________________________________________________________________________
conv2d_322 (Conv2D) (None, 12, 12, 192) 215040 activation_309[0][0]
__________________________________________________________________________________________________
batch_normalization_319 (BatchN (None, 12, 12, 192) 576 conv2d_319[0][0]
__________________________________________________________________________________________________
batch_normalization_322 (BatchN (None, 12, 12, 192) 576 conv2d_322[0][0]
__________________________________________________________________________________________________
activation_307 (Activation) (None, 12, 12, 192) 0 batch_normalization_319[0][0]
__________________________________________________________________________________________________
activation_310 (Activation) (None, 12, 12, 192) 0 batch_normalization_322[0][0]
__________________________________________________________________________________________________
block17_8_mixed (Concatenate) (None, 12, 12, 384) 0 activation_307[0][0]
activation_310[0][0]
__________________________________________________________________________________________________
block17_8_conv (Conv2D) (None, 12, 12, 1088) 418880 block17_8_mixed[0][0]
__________________________________________________________________________________________________
block17_8 (Lambda) (None, 12, 12, 1088) 0 block17_7_ac[0][0]
block17_8_conv[0][0]
__________________________________________________________________________________________________
block17_8_ac (Activation) (None, 12, 12, 1088) 0 block17_8[0][0]
__________________________________________________________________________________________________
conv2d_324 (Conv2D) (None, 12, 12, 128) 139264 block17_8_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_324 (BatchN (None, 12, 12, 128) 384 conv2d_324[0][0]
__________________________________________________________________________________________________
activation_312 (Activation) (None, 12, 12, 128) 0 batch_normalization_324[0][0]
__________________________________________________________________________________________________
conv2d_325 (Conv2D) (None, 12, 12, 160) 143360 activation_312[0][0]
__________________________________________________________________________________________________
batch_normalization_325 (BatchN (None, 12, 12, 160) 480 conv2d_325[0][0]
__________________________________________________________________________________________________
activation_313 (Activation) (None, 12, 12, 160) 0 batch_normalization_325[0][0]
__________________________________________________________________________________________________
conv2d_323 (Conv2D) (None, 12, 12, 192) 208896 block17_8_ac[0][0]
__________________________________________________________________________________________________
conv2d_326 (Conv2D) (None, 12, 12, 192) 215040 activation_313[0][0]
__________________________________________________________________________________________________
batch_normalization_323 (BatchN (None, 12, 12, 192) 576 conv2d_323[0][0]
__________________________________________________________________________________________________
batch_normalization_326 (BatchN (None, 12, 12, 192) 576 conv2d_326[0][0]
__________________________________________________________________________________________________
activation_311 (Activation) (None, 12, 12, 192) 0 batch_normalization_323[0][0]
__________________________________________________________________________________________________
activation_314 (Activation) (None, 12, 12, 192) 0 batch_normalization_326[0][0]
__________________________________________________________________________________________________
block17_9_mixed (Concatenate) (None, 12, 12, 384) 0 activation_311[0][0]
activation_314[0][0]
__________________________________________________________________________________________________
block17_9_conv (Conv2D) (None, 12, 12, 1088) 418880 block17_9_mixed[0][0]
__________________________________________________________________________________________________
block17_9 (Lambda) (None, 12, 12, 1088) 0 block17_8_ac[0][0]
block17_9_conv[0][0]
__________________________________________________________________________________________________
block17_9_ac (Activation) (None, 12, 12, 1088) 0 block17_9[0][0]
__________________________________________________________________________________________________
conv2d_328 (Conv2D) (None, 12, 12, 128) 139264 block17_9_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_328 (BatchN (None, 12, 12, 128) 384 conv2d_328[0][0]
__________________________________________________________________________________________________
activation_316 (Activation) (None, 12, 12, 128) 0 batch_normalization_328[0][0]
__________________________________________________________________________________________________
conv2d_329 (Conv2D) (None, 12, 12, 160) 143360 activation_316[0][0]
__________________________________________________________________________________________________
batch_normalization_329 (BatchN (None, 12, 12, 160) 480 conv2d_329[0][0]
__________________________________________________________________________________________________
activation_317 (Activation) (None, 12, 12, 160) 0 batch_normalization_329[0][0]
__________________________________________________________________________________________________
conv2d_327 (Conv2D) (None, 12, 12, 192) 208896 block17_9_ac[0][0]
__________________________________________________________________________________________________
conv2d_330 (Conv2D) (None, 12, 12, 192) 215040 activation_317[0][0]
__________________________________________________________________________________________________
batch_normalization_327 (BatchN (None, 12, 12, 192) 576 conv2d_327[0][0]
__________________________________________________________________________________________________
batch_normalization_330 (BatchN (None, 12, 12, 192) 576 conv2d_330[0][0]
__________________________________________________________________________________________________
activation_315 (Activation) (None, 12, 12, 192) 0 batch_normalization_327[0][0]
__________________________________________________________________________________________________
activation_318 (Activation) (None, 12, 12, 192) 0 batch_normalization_330[0][0]
__________________________________________________________________________________________________
block17_10_mixed (Concatenate) (None, 12, 12, 384) 0 activation_315[0][0]
activation_318[0][0]
__________________________________________________________________________________________________
block17_10_conv (Conv2D) (None, 12, 12, 1088) 418880 block17_10_mixed[0][0]
__________________________________________________________________________________________________
block17_10 (Lambda) (None, 12, 12, 1088) 0 block17_9_ac[0][0]
block17_10_conv[0][0]
__________________________________________________________________________________________________
block17_10_ac (Activation) (None, 12, 12, 1088) 0 block17_10[0][0]
__________________________________________________________________________________________________
conv2d_332 (Conv2D) (None, 12, 12, 128) 139264 block17_10_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_332 (BatchN (None, 12, 12, 128) 384 conv2d_332[0][0]
__________________________________________________________________________________________________
activation_320 (Activation) (None, 12, 12, 128) 0 batch_normalization_332[0][0]
__________________________________________________________________________________________________
conv2d_333 (Conv2D) (None, 12, 12, 160) 143360 activation_320[0][0]
__________________________________________________________________________________________________
batch_normalization_333 (BatchN (None, 12, 12, 160) 480 conv2d_333[0][0]
__________________________________________________________________________________________________
activation_321 (Activation) (None, 12, 12, 160) 0 batch_normalization_333[0][0]
__________________________________________________________________________________________________
conv2d_331 (Conv2D) (None, 12, 12, 192) 208896 block17_10_ac[0][0]
__________________________________________________________________________________________________
conv2d_334 (Conv2D) (None, 12, 12, 192) 215040 activation_321[0][0]
__________________________________________________________________________________________________
batch_normalization_331 (BatchN (None, 12, 12, 192) 576 conv2d_331[0][0]
__________________________________________________________________________________________________
batch_normalization_334 (BatchN (None, 12, 12, 192) 576 conv2d_334[0][0]
__________________________________________________________________________________________________
activation_319 (Activation) (None, 12, 12, 192) 0 batch_normalization_331[0][0]
__________________________________________________________________________________________________
activation_322 (Activation) (None, 12, 12, 192) 0 batch_normalization_334[0][0]
__________________________________________________________________________________________________
block17_11_mixed (Concatenate) (None, 12, 12, 384) 0 activation_319[0][0]
activation_322[0][0]
__________________________________________________________________________________________________
block17_11_conv (Conv2D) (None, 12, 12, 1088) 418880 block17_11_mixed[0][0]
__________________________________________________________________________________________________
block17_11 (Lambda) (None, 12, 12, 1088) 0 block17_10_ac[0][0]
block17_11_conv[0][0]
__________________________________________________________________________________________________
block17_11_ac (Activation) (None, 12, 12, 1088) 0 block17_11[0][0]
__________________________________________________________________________________________________
conv2d_336 (Conv2D) (None, 12, 12, 128) 139264 block17_11_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_336 (BatchN (None, 12, 12, 128) 384 conv2d_336[0][0]
__________________________________________________________________________________________________
activation_324 (Activation) (None, 12, 12, 128) 0 batch_normalization_336[0][0]
__________________________________________________________________________________________________
conv2d_337 (Conv2D) (None, 12, 12, 160) 143360 activation_324[0][0]
__________________________________________________________________________________________________
batch_normalization_337 (BatchN (None, 12, 12, 160) 480 conv2d_337[0][0]
__________________________________________________________________________________________________
activation_325 (Activation) (None, 12, 12, 160) 0 batch_normalization_337[0][0]
__________________________________________________________________________________________________
conv2d_335 (Conv2D) (None, 12, 12, 192) 208896 block17_11_ac[0][0]
__________________________________________________________________________________________________
conv2d_338 (Conv2D) (None, 12, 12, 192) 215040 activation_325[0][0]
__________________________________________________________________________________________________
batch_normalization_335 (BatchN (None, 12, 12, 192) 576 conv2d_335[0][0]
__________________________________________________________________________________________________
batch_normalization_338 (BatchN (None, 12, 12, 192) 576 conv2d_338[0][0]
__________________________________________________________________________________________________
activation_323 (Activation) (None, 12, 12, 192) 0 batch_normalization_335[0][0]
__________________________________________________________________________________________________
activation_326 (Activation) (None, 12, 12, 192) 0 batch_normalization_338[0][0]
__________________________________________________________________________________________________
block17_12_mixed (Concatenate) (None, 12, 12, 384) 0 activation_323[0][0]
activation_326[0][0]
__________________________________________________________________________________________________
block17_12_conv (Conv2D) (None, 12, 12, 1088) 418880 block17_12_mixed[0][0]
__________________________________________________________________________________________________
block17_12 (Lambda) (None, 12, 12, 1088) 0 block17_11_ac[0][0]
block17_12_conv[0][0]
__________________________________________________________________________________________________
block17_12_ac (Activation) (None, 12, 12, 1088) 0 block17_12[0][0]
__________________________________________________________________________________________________
conv2d_340 (Conv2D) (None, 12, 12, 128) 139264 block17_12_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_340 (BatchN (None, 12, 12, 128) 384 conv2d_340[0][0]
__________________________________________________________________________________________________
activation_328 (Activation) (None, 12, 12, 128) 0 batch_normalization_340[0][0]
__________________________________________________________________________________________________
conv2d_341 (Conv2D) (None, 12, 12, 160) 143360 activation_328[0][0]
__________________________________________________________________________________________________
batch_normalization_341 (BatchN (None, 12, 12, 160) 480 conv2d_341[0][0]
__________________________________________________________________________________________________
activation_329 (Activation) (None, 12, 12, 160) 0 batch_normalization_341[0][0]
__________________________________________________________________________________________________
conv2d_339 (Conv2D) (None, 12, 12, 192) 208896 block17_12_ac[0][0]
__________________________________________________________________________________________________
conv2d_342 (Conv2D) (None, 12, 12, 192) 215040 activation_329[0][0]
__________________________________________________________________________________________________
batch_normalization_339 (BatchN (None, 12, 12, 192) 576 conv2d_339[0][0]
__________________________________________________________________________________________________
batch_normalization_342 (BatchN (None, 12, 12, 192) 576 conv2d_342[0][0]
__________________________________________________________________________________________________
activation_327 (Activation) (None, 12, 12, 192) 0 batch_normalization_339[0][0]
__________________________________________________________________________________________________
activation_330 (Activation) (None, 12, 12, 192) 0 batch_normalization_342[0][0]
__________________________________________________________________________________________________
block17_13_mixed (Concatenate) (None, 12, 12, 384) 0 activation_327[0][0]
activation_330[0][0]
__________________________________________________________________________________________________
block17_13_conv (Conv2D) (None, 12, 12, 1088) 418880 block17_13_mixed[0][0]
__________________________________________________________________________________________________
block17_13 (Lambda) (None, 12, 12, 1088) 0 block17_12_ac[0][0]
block17_13_conv[0][0]
__________________________________________________________________________________________________
block17_13_ac (Activation) (None, 12, 12, 1088) 0 block17_13[0][0]
__________________________________________________________________________________________________
conv2d_344 (Conv2D) (None, 12, 12, 128) 139264 block17_13_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_344 (BatchN (None, 12, 12, 128) 384 conv2d_344[0][0]
__________________________________________________________________________________________________
activation_332 (Activation) (None, 12, 12, 128) 0 batch_normalization_344[0][0]
__________________________________________________________________________________________________
conv2d_345 (Conv2D) (None, 12, 12, 160) 143360 activation_332[0][0]
__________________________________________________________________________________________________
batch_normalization_345 (BatchN (None, 12, 12, 160) 480 conv2d_345[0][0]
__________________________________________________________________________________________________
activation_333 (Activation) (None, 12, 12, 160) 0 batch_normalization_345[0][0]
__________________________________________________________________________________________________
conv2d_343 (Conv2D) (None, 12, 12, 192) 208896 block17_13_ac[0][0]
__________________________________________________________________________________________________
conv2d_346 (Conv2D) (None, 12, 12, 192) 215040 activation_333[0][0]
__________________________________________________________________________________________________
batch_normalization_343 (BatchN (None, 12, 12, 192) 576 conv2d_343[0][0]
__________________________________________________________________________________________________
batch_normalization_346 (BatchN (None, 12, 12, 192) 576 conv2d_346[0][0]
__________________________________________________________________________________________________
activation_331 (Activation) (None, 12, 12, 192) 0 batch_normalization_343[0][0]
__________________________________________________________________________________________________
activation_334 (Activation) (None, 12, 12, 192) 0 batch_normalization_346[0][0]
__________________________________________________________________________________________________
block17_14_mixed (Concatenate) (None, 12, 12, 384) 0 activation_331[0][0]
activation_334[0][0]
__________________________________________________________________________________________________
block17_14_conv (Conv2D) (None, 12, 12, 1088) 418880 block17_14_mixed[0][0]
__________________________________________________________________________________________________
block17_14 (Lambda) (None, 12, 12, 1088) 0 block17_13_ac[0][0]
block17_14_conv[0][0]
__________________________________________________________________________________________________
block17_14_ac (Activation) (None, 12, 12, 1088) 0 block17_14[0][0]
__________________________________________________________________________________________________
conv2d_348 (Conv2D) (None, 12, 12, 128) 139264 block17_14_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_348 (BatchN (None, 12, 12, 128) 384 conv2d_348[0][0]
__________________________________________________________________________________________________
activation_336 (Activation) (None, 12, 12, 128) 0 batch_normalization_348[0][0]
__________________________________________________________________________________________________
conv2d_349 (Conv2D) (None, 12, 12, 160) 143360 activation_336[0][0]
__________________________________________________________________________________________________
batch_normalization_349 (BatchN (None, 12, 12, 160) 480 conv2d_349[0][0]
__________________________________________________________________________________________________
activation_337 (Activation) (None, 12, 12, 160) 0 batch_normalization_349[0][0]
__________________________________________________________________________________________________
conv2d_347 (Conv2D) (None, 12, 12, 192) 208896 block17_14_ac[0][0]
__________________________________________________________________________________________________
conv2d_350 (Conv2D) (None, 12, 12, 192) 215040 activation_337[0][0]
__________________________________________________________________________________________________
batch_normalization_347 (BatchN (None, 12, 12, 192) 576 conv2d_347[0][0]
__________________________________________________________________________________________________
batch_normalization_350 (BatchN (None, 12, 12, 192) 576 conv2d_350[0][0]
__________________________________________________________________________________________________
activation_335 (Activation) (None, 12, 12, 192) 0 batch_normalization_347[0][0]
__________________________________________________________________________________________________
activation_338 (Activation) (None, 12, 12, 192) 0 batch_normalization_350[0][0]
__________________________________________________________________________________________________
block17_15_mixed (Concatenate) (None, 12, 12, 384) 0 activation_335[0][0]
activation_338[0][0]
__________________________________________________________________________________________________
block17_15_conv (Conv2D) (None, 12, 12, 1088) 418880 block17_15_mixed[0][0]
__________________________________________________________________________________________________
block17_15 (Lambda) (None, 12, 12, 1088) 0 block17_14_ac[0][0]
block17_15_conv[0][0]
__________________________________________________________________________________________________
block17_15_ac (Activation) (None, 12, 12, 1088) 0 block17_15[0][0]
__________________________________________________________________________________________________
conv2d_352 (Conv2D) (None, 12, 12, 128) 139264 block17_15_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_352 (BatchN (None, 12, 12, 128) 384 conv2d_352[0][0]
__________________________________________________________________________________________________
activation_340 (Activation) (None, 12, 12, 128) 0 batch_normalization_352[0][0]
__________________________________________________________________________________________________
conv2d_353 (Conv2D) (None, 12, 12, 160) 143360 activation_340[0][0]
__________________________________________________________________________________________________
batch_normalization_353 (BatchN (None, 12, 12, 160) 480 conv2d_353[0][0]
__________________________________________________________________________________________________
activation_341 (Activation) (None, 12, 12, 160) 0 batch_normalization_353[0][0]
__________________________________________________________________________________________________
conv2d_351 (Conv2D) (None, 12, 12, 192) 208896 block17_15_ac[0][0]
__________________________________________________________________________________________________
conv2d_354 (Conv2D) (None, 12, 12, 192) 215040 activation_341[0][0]
__________________________________________________________________________________________________
batch_normalization_351 (BatchN (None, 12, 12, 192) 576 conv2d_351[0][0]
__________________________________________________________________________________________________
batch_normalization_354 (BatchN (None, 12, 12, 192) 576 conv2d_354[0][0]
__________________________________________________________________________________________________
activation_339 (Activation) (None, 12, 12, 192) 0 batch_normalization_351[0][0]
__________________________________________________________________________________________________
activation_342 (Activation) (None, 12, 12, 192) 0 batch_normalization_354[0][0]
__________________________________________________________________________________________________
block17_16_mixed (Concatenate) (None, 12, 12, 384) 0 activation_339[0][0]
activation_342[0][0]
__________________________________________________________________________________________________
block17_16_conv (Conv2D) (None, 12, 12, 1088) 418880 block17_16_mixed[0][0]
__________________________________________________________________________________________________
block17_16 (Lambda) (None, 12, 12, 1088) 0 block17_15_ac[0][0]
block17_16_conv[0][0]
__________________________________________________________________________________________________
block17_16_ac (Activation) (None, 12, 12, 1088) 0 block17_16[0][0]
__________________________________________________________________________________________________
conv2d_356 (Conv2D) (None, 12, 12, 128) 139264 block17_16_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_356 (BatchN (None, 12, 12, 128) 384 conv2d_356[0][0]
__________________________________________________________________________________________________
activation_344 (Activation) (None, 12, 12, 128) 0 batch_normalization_356[0][0]
__________________________________________________________________________________________________
conv2d_357 (Conv2D) (None, 12, 12, 160) 143360 activation_344[0][0]
__________________________________________________________________________________________________
batch_normalization_357 (BatchN (None, 12, 12, 160) 480 conv2d_357[0][0]
__________________________________________________________________________________________________
activation_345 (Activation) (None, 12, 12, 160) 0 batch_normalization_357[0][0]
__________________________________________________________________________________________________
conv2d_355 (Conv2D) (None, 12, 12, 192) 208896 block17_16_ac[0][0]
__________________________________________________________________________________________________
conv2d_358 (Conv2D) (None, 12, 12, 192) 215040 activation_345[0][0]
__________________________________________________________________________________________________
batch_normalization_355 (BatchN (None, 12, 12, 192) 576 conv2d_355[0][0]
__________________________________________________________________________________________________
batch_normalization_358 (BatchN (None, 12, 12, 192) 576 conv2d_358[0][0]
__________________________________________________________________________________________________
activation_343 (Activation) (None, 12, 12, 192) 0 batch_normalization_355[0][0]
__________________________________________________________________________________________________
activation_346 (Activation) (None, 12, 12, 192) 0 batch_normalization_358[0][0]
__________________________________________________________________________________________________
block17_17_mixed (Concatenate) (None, 12, 12, 384) 0 activation_343[0][0]
activation_346[0][0]
__________________________________________________________________________________________________
block17_17_conv (Conv2D) (None, 12, 12, 1088) 418880 block17_17_mixed[0][0]
__________________________________________________________________________________________________
block17_17 (Lambda) (None, 12, 12, 1088) 0 block17_16_ac[0][0]
block17_17_conv[0][0]
__________________________________________________________________________________________________
block17_17_ac (Activation) (None, 12, 12, 1088) 0 block17_17[0][0]
__________________________________________________________________________________________________
conv2d_360 (Conv2D) (None, 12, 12, 128) 139264 block17_17_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_360 (BatchN (None, 12, 12, 128) 384 conv2d_360[0][0]
__________________________________________________________________________________________________
activation_348 (Activation) (None, 12, 12, 128) 0 batch_normalization_360[0][0]
__________________________________________________________________________________________________
conv2d_361 (Conv2D) (None, 12, 12, 160) 143360 activation_348[0][0]
__________________________________________________________________________________________________
batch_normalization_361 (BatchN (None, 12, 12, 160) 480 conv2d_361[0][0]
__________________________________________________________________________________________________
activation_349 (Activation) (None, 12, 12, 160) 0 batch_normalization_361[0][0]
__________________________________________________________________________________________________
conv2d_359 (Conv2D) (None, 12, 12, 192) 208896 block17_17_ac[0][0]
__________________________________________________________________________________________________
conv2d_362 (Conv2D) (None, 12, 12, 192) 215040 activation_349[0][0]
__________________________________________________________________________________________________
batch_normalization_359 (BatchN (None, 12, 12, 192) 576 conv2d_359[0][0]
__________________________________________________________________________________________________
batch_normalization_362 (BatchN (None, 12, 12, 192) 576 conv2d_362[0][0]
__________________________________________________________________________________________________
activation_347 (Activation) (None, 12, 12, 192) 0 batch_normalization_359[0][0]
__________________________________________________________________________________________________
activation_350 (Activation) (None, 12, 12, 192) 0 batch_normalization_362[0][0]
__________________________________________________________________________________________________
block17_18_mixed (Concatenate) (None, 12, 12, 384) 0 activation_347[0][0]
activation_350[0][0]
__________________________________________________________________________________________________
block17_18_conv (Conv2D) (None, 12, 12, 1088) 418880 block17_18_mixed[0][0]
__________________________________________________________________________________________________
block17_18 (Lambda) (None, 12, 12, 1088) 0 block17_17_ac[0][0]
block17_18_conv[0][0]
__________________________________________________________________________________________________
block17_18_ac (Activation) (None, 12, 12, 1088) 0 block17_18[0][0]
__________________________________________________________________________________________________
conv2d_364 (Conv2D) (None, 12, 12, 128) 139264 block17_18_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_364 (BatchN (None, 12, 12, 128) 384 conv2d_364[0][0]
__________________________________________________________________________________________________
activation_352 (Activation) (None, 12, 12, 128) 0 batch_normalization_364[0][0]
__________________________________________________________________________________________________
conv2d_365 (Conv2D) (None, 12, 12, 160) 143360 activation_352[0][0]
__________________________________________________________________________________________________
batch_normalization_365 (BatchN (None, 12, 12, 160) 480 conv2d_365[0][0]
__________________________________________________________________________________________________
activation_353 (Activation) (None, 12, 12, 160) 0 batch_normalization_365[0][0]
__________________________________________________________________________________________________
conv2d_363 (Conv2D) (None, 12, 12, 192) 208896 block17_18_ac[0][0]
__________________________________________________________________________________________________
conv2d_366 (Conv2D) (None, 12, 12, 192) 215040 activation_353[0][0]
__________________________________________________________________________________________________
batch_normalization_363 (BatchN (None, 12, 12, 192) 576 conv2d_363[0][0]
__________________________________________________________________________________________________
batch_normalization_366 (BatchN (None, 12, 12, 192) 576 conv2d_366[0][0]
__________________________________________________________________________________________________
activation_351 (Activation) (None, 12, 12, 192) 0 batch_normalization_363[0][0]
__________________________________________________________________________________________________
activation_354 (Activation) (None, 12, 12, 192) 0 batch_normalization_366[0][0]
__________________________________________________________________________________________________
block17_19_mixed (Concatenate) (None, 12, 12, 384) 0 activation_351[0][0]
activation_354[0][0]
__________________________________________________________________________________________________
block17_19_conv (Conv2D) (None, 12, 12, 1088) 418880 block17_19_mixed[0][0]
__________________________________________________________________________________________________
block17_19 (Lambda) (None, 12, 12, 1088) 0 block17_18_ac[0][0]
block17_19_conv[0][0]
__________________________________________________________________________________________________
block17_19_ac (Activation) (None, 12, 12, 1088) 0 block17_19[0][0]
__________________________________________________________________________________________________
conv2d_368 (Conv2D) (None, 12, 12, 128) 139264 block17_19_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_368 (BatchN (None, 12, 12, 128) 384 conv2d_368[0][0]
__________________________________________________________________________________________________
activation_356 (Activation) (None, 12, 12, 128) 0 batch_normalization_368[0][0]
__________________________________________________________________________________________________
conv2d_369 (Conv2D) (None, 12, 12, 160) 143360 activation_356[0][0]
__________________________________________________________________________________________________
batch_normalization_369 (BatchN (None, 12, 12, 160) 480 conv2d_369[0][0]
__________________________________________________________________________________________________
activation_357 (Activation) (None, 12, 12, 160) 0 batch_normalization_369[0][0]
__________________________________________________________________________________________________
conv2d_367 (Conv2D) (None, 12, 12, 192) 208896 block17_19_ac[0][0]
__________________________________________________________________________________________________
conv2d_370 (Conv2D) (None, 12, 12, 192) 215040 activation_357[0][0]
__________________________________________________________________________________________________
batch_normalization_367 (BatchN (None, 12, 12, 192) 576 conv2d_367[0][0]
__________________________________________________________________________________________________
batch_normalization_370 (BatchN (None, 12, 12, 192) 576 conv2d_370[0][0]
__________________________________________________________________________________________________
activation_355 (Activation) (None, 12, 12, 192) 0 batch_normalization_367[0][0]
__________________________________________________________________________________________________
activation_358 (Activation) (None, 12, 12, 192) 0 batch_normalization_370[0][0]
__________________________________________________________________________________________________
block17_20_mixed (Concatenate) (None, 12, 12, 384) 0 activation_355[0][0]
activation_358[0][0]
__________________________________________________________________________________________________
block17_20_conv (Conv2D) (None, 12, 12, 1088) 418880 block17_20_mixed[0][0]
__________________________________________________________________________________________________
block17_20 (Lambda) (None, 12, 12, 1088) 0 block17_19_ac[0][0]
block17_20_conv[0][0]
__________________________________________________________________________________________________
block17_20_ac (Activation) (None, 12, 12, 1088) 0 block17_20[0][0]
__________________________________________________________________________________________________
conv2d_375 (Conv2D) (None, 12, 12, 256) 278528 block17_20_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_375 (BatchN (None, 12, 12, 256) 768 conv2d_375[0][0]
__________________________________________________________________________________________________
activation_363 (Activation) (None, 12, 12, 256) 0 batch_normalization_375[0][0]
__________________________________________________________________________________________________
conv2d_371 (Conv2D) (None, 12, 12, 256) 278528 block17_20_ac[0][0]
__________________________________________________________________________________________________
conv2d_373 (Conv2D) (None, 12, 12, 256) 278528 block17_20_ac[0][0]
__________________________________________________________________________________________________
conv2d_376 (Conv2D) (None, 12, 12, 288) 663552 activation_363[0][0]
__________________________________________________________________________________________________
batch_normalization_371 (BatchN (None, 12, 12, 256) 768 conv2d_371[0][0]
__________________________________________________________________________________________________
batch_normalization_373 (BatchN (None, 12, 12, 256) 768 conv2d_373[0][0]
__________________________________________________________________________________________________
batch_normalization_376 (BatchN (None, 12, 12, 288) 864 conv2d_376[0][0]
__________________________________________________________________________________________________
activation_359 (Activation) (None, 12, 12, 256) 0 batch_normalization_371[0][0]
__________________________________________________________________________________________________
activation_361 (Activation) (None, 12, 12, 256) 0 batch_normalization_373[0][0]
__________________________________________________________________________________________________
activation_364 (Activation) (None, 12, 12, 288) 0 batch_normalization_376[0][0]
__________________________________________________________________________________________________
conv2d_372 (Conv2D) (None, 5, 5, 384) 884736 activation_359[0][0]
__________________________________________________________________________________________________
conv2d_374 (Conv2D) (None, 5, 5, 288) 663552 activation_361[0][0]
__________________________________________________________________________________________________
conv2d_377 (Conv2D) (None, 5, 5, 320) 829440 activation_364[0][0]
__________________________________________________________________________________________________
batch_normalization_372 (BatchN (None, 5, 5, 384) 1152 conv2d_372[0][0]
__________________________________________________________________________________________________
batch_normalization_374 (BatchN (None, 5, 5, 288) 864 conv2d_374[0][0]
__________________________________________________________________________________________________
batch_normalization_377 (BatchN (None, 5, 5, 320) 960 conv2d_377[0][0]
__________________________________________________________________________________________________
activation_360 (Activation) (None, 5, 5, 384) 0 batch_normalization_372[0][0]
__________________________________________________________________________________________________
activation_362 (Activation) (None, 5, 5, 288) 0 batch_normalization_374[0][0]
__________________________________________________________________________________________________
activation_365 (Activation) (None, 5, 5, 320) 0 batch_normalization_377[0][0]
__________________________________________________________________________________________________
max_pooling2d_7 (MaxPooling2D) (None, 5, 5, 1088) 0 block17_20_ac[0][0]
__________________________________________________________________________________________________
mixed_7a (Concatenate) (None, 5, 5, 2080) 0 activation_360[0][0]
activation_362[0][0]
activation_365[0][0]
max_pooling2d_7[0][0]
__________________________________________________________________________________________________
conv2d_379 (Conv2D) (None, 5, 5, 192) 399360 mixed_7a[0][0]
__________________________________________________________________________________________________
batch_normalization_379 (BatchN (None, 5, 5, 192) 576 conv2d_379[0][0]
__________________________________________________________________________________________________
activation_367 (Activation) (None, 5, 5, 192) 0 batch_normalization_379[0][0]
__________________________________________________________________________________________________
conv2d_380 (Conv2D) (None, 5, 5, 224) 129024 activation_367[0][0]
__________________________________________________________________________________________________
batch_normalization_380 (BatchN (None, 5, 5, 224) 672 conv2d_380[0][0]
__________________________________________________________________________________________________
activation_368 (Activation) (None, 5, 5, 224) 0 batch_normalization_380[0][0]
__________________________________________________________________________________________________
conv2d_378 (Conv2D) (None, 5, 5, 192) 399360 mixed_7a[0][0]
__________________________________________________________________________________________________
conv2d_381 (Conv2D) (None, 5, 5, 256) 172032 activation_368[0][0]
__________________________________________________________________________________________________
batch_normalization_378 (BatchN (None, 5, 5, 192) 576 conv2d_378[0][0]
__________________________________________________________________________________________________
batch_normalization_381 (BatchN (None, 5, 5, 256) 768 conv2d_381[0][0]
__________________________________________________________________________________________________
activation_366 (Activation) (None, 5, 5, 192) 0 batch_normalization_378[0][0]
__________________________________________________________________________________________________
activation_369 (Activation) (None, 5, 5, 256) 0 batch_normalization_381[0][0]
__________________________________________________________________________________________________
block8_1_mixed (Concatenate) (None, 5, 5, 448) 0 activation_366[0][0]
activation_369[0][0]
__________________________________________________________________________________________________
block8_1_conv (Conv2D) (None, 5, 5, 2080) 933920 block8_1_mixed[0][0]
__________________________________________________________________________________________________
block8_1 (Lambda) (None, 5, 5, 2080) 0 mixed_7a[0][0]
block8_1_conv[0][0]
__________________________________________________________________________________________________
block8_1_ac (Activation) (None, 5, 5, 2080) 0 block8_1[0][0]
__________________________________________________________________________________________________
conv2d_383 (Conv2D) (None, 5, 5, 192) 399360 block8_1_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_383 (BatchN (None, 5, 5, 192) 576 conv2d_383[0][0]
__________________________________________________________________________________________________
activation_371 (Activation) (None, 5, 5, 192) 0 batch_normalization_383[0][0]
__________________________________________________________________________________________________
conv2d_384 (Conv2D) (None, 5, 5, 224) 129024 activation_371[0][0]
__________________________________________________________________________________________________
batch_normalization_384 (BatchN (None, 5, 5, 224) 672 conv2d_384[0][0]
__________________________________________________________________________________________________
activation_372 (Activation) (None, 5, 5, 224) 0 batch_normalization_384[0][0]
__________________________________________________________________________________________________
conv2d_382 (Conv2D) (None, 5, 5, 192) 399360 block8_1_ac[0][0]
__________________________________________________________________________________________________
conv2d_385 (Conv2D) (None, 5, 5, 256) 172032 activation_372[0][0]
__________________________________________________________________________________________________
batch_normalization_382 (BatchN (None, 5, 5, 192) 576 conv2d_382[0][0]
__________________________________________________________________________________________________
batch_normalization_385 (BatchN (None, 5, 5, 256) 768 conv2d_385[0][0]
__________________________________________________________________________________________________
activation_370 (Activation) (None, 5, 5, 192) 0 batch_normalization_382[0][0]
__________________________________________________________________________________________________
activation_373 (Activation) (None, 5, 5, 256) 0 batch_normalization_385[0][0]
__________________________________________________________________________________________________
block8_2_mixed (Concatenate) (None, 5, 5, 448) 0 activation_370[0][0]
activation_373[0][0]
__________________________________________________________________________________________________
block8_2_conv (Conv2D) (None, 5, 5, 2080) 933920 block8_2_mixed[0][0]
__________________________________________________________________________________________________
block8_2 (Lambda) (None, 5, 5, 2080) 0 block8_1_ac[0][0]
block8_2_conv[0][0]
__________________________________________________________________________________________________
block8_2_ac (Activation) (None, 5, 5, 2080) 0 block8_2[0][0]
__________________________________________________________________________________________________
conv2d_387 (Conv2D) (None, 5, 5, 192) 399360 block8_2_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_387 (BatchN (None, 5, 5, 192) 576 conv2d_387[0][0]
__________________________________________________________________________________________________
activation_375 (Activation) (None, 5, 5, 192) 0 batch_normalization_387[0][0]
__________________________________________________________________________________________________
conv2d_388 (Conv2D) (None, 5, 5, 224) 129024 activation_375[0][0]
__________________________________________________________________________________________________
batch_normalization_388 (BatchN (None, 5, 5, 224) 672 conv2d_388[0][0]
__________________________________________________________________________________________________
activation_376 (Activation) (None, 5, 5, 224) 0 batch_normalization_388[0][0]
__________________________________________________________________________________________________
conv2d_386 (Conv2D) (None, 5, 5, 192) 399360 block8_2_ac[0][0]
__________________________________________________________________________________________________
conv2d_389 (Conv2D) (None, 5, 5, 256) 172032 activation_376[0][0]
__________________________________________________________________________________________________
batch_normalization_386 (BatchN (None, 5, 5, 192) 576 conv2d_386[0][0]
__________________________________________________________________________________________________
batch_normalization_389 (BatchN (None, 5, 5, 256) 768 conv2d_389[0][0]
__________________________________________________________________________________________________
activation_374 (Activation) (None, 5, 5, 192) 0 batch_normalization_386[0][0]
__________________________________________________________________________________________________
activation_377 (Activation) (None, 5, 5, 256) 0 batch_normalization_389[0][0]
__________________________________________________________________________________________________
block8_3_mixed (Concatenate) (None, 5, 5, 448) 0 activation_374[0][0]
activation_377[0][0]
__________________________________________________________________________________________________
block8_3_conv (Conv2D) (None, 5, 5, 2080) 933920 block8_3_mixed[0][0]
__________________________________________________________________________________________________
block8_3 (Lambda) (None, 5, 5, 2080) 0 block8_2_ac[0][0]
block8_3_conv[0][0]
__________________________________________________________________________________________________
block8_3_ac (Activation) (None, 5, 5, 2080) 0 block8_3[0][0]
__________________________________________________________________________________________________
conv2d_391 (Conv2D) (None, 5, 5, 192) 399360 block8_3_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_391 (BatchN (None, 5, 5, 192) 576 conv2d_391[0][0]
__________________________________________________________________________________________________
activation_379 (Activation) (None, 5, 5, 192) 0 batch_normalization_391[0][0]
__________________________________________________________________________________________________
conv2d_392 (Conv2D) (None, 5, 5, 224) 129024 activation_379[0][0]
__________________________________________________________________________________________________
batch_normalization_392 (BatchN (None, 5, 5, 224) 672 conv2d_392[0][0]
__________________________________________________________________________________________________
activation_380 (Activation) (None, 5, 5, 224) 0 batch_normalization_392[0][0]
__________________________________________________________________________________________________
conv2d_390 (Conv2D) (None, 5, 5, 192) 399360 block8_3_ac[0][0]
__________________________________________________________________________________________________
conv2d_393 (Conv2D) (None, 5, 5, 256) 172032 activation_380[0][0]
__________________________________________________________________________________________________
batch_normalization_390 (BatchN (None, 5, 5, 192) 576 conv2d_390[0][0]
__________________________________________________________________________________________________
batch_normalization_393 (BatchN (None, 5, 5, 256) 768 conv2d_393[0][0]
__________________________________________________________________________________________________
activation_378 (Activation) (None, 5, 5, 192) 0 batch_normalization_390[0][0]
__________________________________________________________________________________________________
activation_381 (Activation) (None, 5, 5, 256) 0 batch_normalization_393[0][0]
__________________________________________________________________________________________________
block8_4_mixed (Concatenate) (None, 5, 5, 448) 0 activation_378[0][0]
activation_381[0][0]
__________________________________________________________________________________________________
block8_4_conv (Conv2D) (None, 5, 5, 2080) 933920 block8_4_mixed[0][0]
__________________________________________________________________________________________________
block8_4 (Lambda) (None, 5, 5, 2080) 0 block8_3_ac[0][0]
block8_4_conv[0][0]
__________________________________________________________________________________________________
block8_4_ac (Activation) (None, 5, 5, 2080) 0 block8_4[0][0]
__________________________________________________________________________________________________
conv2d_395 (Conv2D) (None, 5, 5, 192) 399360 block8_4_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_395 (BatchN (None, 5, 5, 192) 576 conv2d_395[0][0]
__________________________________________________________________________________________________
activation_383 (Activation) (None, 5, 5, 192) 0 batch_normalization_395[0][0]
__________________________________________________________________________________________________
conv2d_396 (Conv2D) (None, 5, 5, 224) 129024 activation_383[0][0]
__________________________________________________________________________________________________
batch_normalization_396 (BatchN (None, 5, 5, 224) 672 conv2d_396[0][0]
__________________________________________________________________________________________________
activation_384 (Activation) (None, 5, 5, 224) 0 batch_normalization_396[0][0]
__________________________________________________________________________________________________
conv2d_394 (Conv2D) (None, 5, 5, 192) 399360 block8_4_ac[0][0]
__________________________________________________________________________________________________
conv2d_397 (Conv2D) (None, 5, 5, 256) 172032 activation_384[0][0]
__________________________________________________________________________________________________
batch_normalization_394 (BatchN (None, 5, 5, 192) 576 conv2d_394[0][0]
__________________________________________________________________________________________________
batch_normalization_397 (BatchN (None, 5, 5, 256) 768 conv2d_397[0][0]
__________________________________________________________________________________________________
activation_382 (Activation) (None, 5, 5, 192) 0 batch_normalization_394[0][0]
__________________________________________________________________________________________________
activation_385 (Activation) (None, 5, 5, 256) 0 batch_normalization_397[0][0]
__________________________________________________________________________________________________
block8_5_mixed (Concatenate) (None, 5, 5, 448) 0 activation_382[0][0]
activation_385[0][0]
__________________________________________________________________________________________________
block8_5_conv (Conv2D) (None, 5, 5, 2080) 933920 block8_5_mixed[0][0]
__________________________________________________________________________________________________
block8_5 (Lambda) (None, 5, 5, 2080) 0 block8_4_ac[0][0]
block8_5_conv[0][0]
__________________________________________________________________________________________________
block8_5_ac (Activation) (None, 5, 5, 2080) 0 block8_5[0][0]
__________________________________________________________________________________________________
conv2d_399 (Conv2D) (None, 5, 5, 192) 399360 block8_5_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_399 (BatchN (None, 5, 5, 192) 576 conv2d_399[0][0]
__________________________________________________________________________________________________
activation_387 (Activation) (None, 5, 5, 192) 0 batch_normalization_399[0][0]
__________________________________________________________________________________________________
conv2d_400 (Conv2D) (None, 5, 5, 224) 129024 activation_387[0][0]
__________________________________________________________________________________________________
batch_normalization_400 (BatchN (None, 5, 5, 224) 672 conv2d_400[0][0]
__________________________________________________________________________________________________
activation_388 (Activation) (None, 5, 5, 224) 0 batch_normalization_400[0][0]
__________________________________________________________________________________________________
conv2d_398 (Conv2D) (None, 5, 5, 192) 399360 block8_5_ac[0][0]
__________________________________________________________________________________________________
conv2d_401 (Conv2D) (None, 5, 5, 256) 172032 activation_388[0][0]
__________________________________________________________________________________________________
batch_normalization_398 (BatchN (None, 5, 5, 192) 576 conv2d_398[0][0]
__________________________________________________________________________________________________
batch_normalization_401 (BatchN (None, 5, 5, 256) 768 conv2d_401[0][0]
__________________________________________________________________________________________________
activation_386 (Activation) (None, 5, 5, 192) 0 batch_normalization_398[0][0]
__________________________________________________________________________________________________
activation_389 (Activation) (None, 5, 5, 256) 0 batch_normalization_401[0][0]
__________________________________________________________________________________________________
block8_6_mixed (Concatenate) (None, 5, 5, 448) 0 activation_386[0][0]
activation_389[0][0]
__________________________________________________________________________________________________
block8_6_conv (Conv2D) (None, 5, 5, 2080) 933920 block8_6_mixed[0][0]
__________________________________________________________________________________________________
block8_6 (Lambda) (None, 5, 5, 2080) 0 block8_5_ac[0][0]
block8_6_conv[0][0]
__________________________________________________________________________________________________
block8_6_ac (Activation) (None, 5, 5, 2080) 0 block8_6[0][0]
__________________________________________________________________________________________________
conv2d_403 (Conv2D) (None, 5, 5, 192) 399360 block8_6_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_403 (BatchN (None, 5, 5, 192) 576 conv2d_403[0][0]
__________________________________________________________________________________________________
activation_391 (Activation) (None, 5, 5, 192) 0 batch_normalization_403[0][0]
__________________________________________________________________________________________________
conv2d_404 (Conv2D) (None, 5, 5, 224) 129024 activation_391[0][0]
__________________________________________________________________________________________________
batch_normalization_404 (BatchN (None, 5, 5, 224) 672 conv2d_404[0][0]
__________________________________________________________________________________________________
activation_392 (Activation) (None, 5, 5, 224) 0 batch_normalization_404[0][0]
__________________________________________________________________________________________________
conv2d_402 (Conv2D) (None, 5, 5, 192) 399360 block8_6_ac[0][0]
__________________________________________________________________________________________________
conv2d_405 (Conv2D) (None, 5, 5, 256) 172032 activation_392[0][0]
__________________________________________________________________________________________________
batch_normalization_402 (BatchN (None, 5, 5, 192) 576 conv2d_402[0][0]
__________________________________________________________________________________________________
batch_normalization_405 (BatchN (None, 5, 5, 256) 768 conv2d_405[0][0]
__________________________________________________________________________________________________
activation_390 (Activation) (None, 5, 5, 192) 0 batch_normalization_402[0][0]
__________________________________________________________________________________________________
activation_393 (Activation) (None, 5, 5, 256) 0 batch_normalization_405[0][0]
__________________________________________________________________________________________________
block8_7_mixed (Concatenate) (None, 5, 5, 448) 0 activation_390[0][0]
activation_393[0][0]
__________________________________________________________________________________________________
block8_7_conv (Conv2D) (None, 5, 5, 2080) 933920 block8_7_mixed[0][0]
__________________________________________________________________________________________________
block8_7 (Lambda) (None, 5, 5, 2080) 0 block8_6_ac[0][0]
block8_7_conv[0][0]
__________________________________________________________________________________________________
block8_7_ac (Activation) (None, 5, 5, 2080) 0 block8_7[0][0]
__________________________________________________________________________________________________
conv2d_407 (Conv2D) (None, 5, 5, 192) 399360 block8_7_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_407 (BatchN (None, 5, 5, 192) 576 conv2d_407[0][0]
__________________________________________________________________________________________________
activation_395 (Activation) (None, 5, 5, 192) 0 batch_normalization_407[0][0]
__________________________________________________________________________________________________
conv2d_408 (Conv2D) (None, 5, 5, 224) 129024 activation_395[0][0]
__________________________________________________________________________________________________
batch_normalization_408 (BatchN (None, 5, 5, 224) 672 conv2d_408[0][0]
__________________________________________________________________________________________________
activation_396 (Activation) (None, 5, 5, 224) 0 batch_normalization_408[0][0]
__________________________________________________________________________________________________
conv2d_406 (Conv2D) (None, 5, 5, 192) 399360 block8_7_ac[0][0]
__________________________________________________________________________________________________
conv2d_409 (Conv2D) (None, 5, 5, 256) 172032 activation_396[0][0]
__________________________________________________________________________________________________
batch_normalization_406 (BatchN (None, 5, 5, 192) 576 conv2d_406[0][0]
__________________________________________________________________________________________________
batch_normalization_409 (BatchN (None, 5, 5, 256) 768 conv2d_409[0][0]
__________________________________________________________________________________________________
activation_394 (Activation) (None, 5, 5, 192) 0 batch_normalization_406[0][0]
__________________________________________________________________________________________________
activation_397 (Activation) (None, 5, 5, 256) 0 batch_normalization_409[0][0]
__________________________________________________________________________________________________
block8_8_mixed (Concatenate) (None, 5, 5, 448) 0 activation_394[0][0]
activation_397[0][0]
__________________________________________________________________________________________________
block8_8_conv (Conv2D) (None, 5, 5, 2080) 933920 block8_8_mixed[0][0]
__________________________________________________________________________________________________
block8_8 (Lambda) (None, 5, 5, 2080) 0 block8_7_ac[0][0]
block8_8_conv[0][0]
__________________________________________________________________________________________________
block8_8_ac (Activation) (None, 5, 5, 2080) 0 block8_8[0][0]
__________________________________________________________________________________________________
conv2d_411 (Conv2D) (None, 5, 5, 192) 399360 block8_8_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_411 (BatchN (None, 5, 5, 192) 576 conv2d_411[0][0]
__________________________________________________________________________________________________
activation_399 (Activation) (None, 5, 5, 192) 0 batch_normalization_411[0][0]
__________________________________________________________________________________________________
conv2d_412 (Conv2D) (None, 5, 5, 224) 129024 activation_399[0][0]
__________________________________________________________________________________________________
batch_normalization_412 (BatchN (None, 5, 5, 224) 672 conv2d_412[0][0]
__________________________________________________________________________________________________
activation_400 (Activation) (None, 5, 5, 224) 0 batch_normalization_412[0][0]
__________________________________________________________________________________________________
conv2d_410 (Conv2D) (None, 5, 5, 192) 399360 block8_8_ac[0][0]
__________________________________________________________________________________________________
conv2d_413 (Conv2D) (None, 5, 5, 256) 172032 activation_400[0][0]
__________________________________________________________________________________________________
batch_normalization_410 (BatchN (None, 5, 5, 192) 576 conv2d_410[0][0]
__________________________________________________________________________________________________
batch_normalization_413 (BatchN (None, 5, 5, 256) 768 conv2d_413[0][0]
__________________________________________________________________________________________________
activation_398 (Activation) (None, 5, 5, 192) 0 batch_normalization_410[0][0]
__________________________________________________________________________________________________
activation_401 (Activation) (None, 5, 5, 256) 0 batch_normalization_413[0][0]
__________________________________________________________________________________________________
block8_9_mixed (Concatenate) (None, 5, 5, 448) 0 activation_398[0][0]
activation_401[0][0]
__________________________________________________________________________________________________
block8_9_conv (Conv2D) (None, 5, 5, 2080) 933920 block8_9_mixed[0][0]
__________________________________________________________________________________________________
block8_9 (Lambda) (None, 5, 5, 2080) 0 block8_8_ac[0][0]
block8_9_conv[0][0]
__________________________________________________________________________________________________
block8_9_ac (Activation) (None, 5, 5, 2080) 0 block8_9[0][0]
__________________________________________________________________________________________________
conv2d_415 (Conv2D) (None, 5, 5, 192) 399360 block8_9_ac[0][0]
__________________________________________________________________________________________________
batch_normalization_415 (BatchN (None, 5, 5, 192) 576 conv2d_415[0][0]
__________________________________________________________________________________________________
activation_403 (Activation) (None, 5, 5, 192) 0 batch_normalization_415[0][0]
__________________________________________________________________________________________________
conv2d_416 (Conv2D) (None, 5, 5, 224) 129024 activation_403[0][0]
__________________________________________________________________________________________________
batch_normalization_416 (BatchN (None, 5, 5, 224) 672 conv2d_416[0][0]
__________________________________________________________________________________________________
activation_404 (Activation) (None, 5, 5, 224) 0 batch_normalization_416[0][0]
__________________________________________________________________________________________________
conv2d_414 (Conv2D) (None, 5, 5, 192) 399360 block8_9_ac[0][0]
__________________________________________________________________________________________________
conv2d_417 (Conv2D) (None, 5, 5, 256) 172032 activation_404[0][0]
__________________________________________________________________________________________________
batch_normalization_414 (BatchN (None, 5, 5, 192) 576 conv2d_414[0][0]
__________________________________________________________________________________________________
batch_normalization_417 (BatchN (None, 5, 5, 256) 768 conv2d_417[0][0]
__________________________________________________________________________________________________
activation_402 (Activation) (None, 5, 5, 192) 0 batch_normalization_414[0][0]
__________________________________________________________________________________________________
activation_405 (Activation) (None, 5, 5, 256) 0 batch_normalization_417[0][0]
__________________________________________________________________________________________________
block8_10_mixed (Concatenate) (None, 5, 5, 448) 0 activation_402[0][0]
activation_405[0][0]
__________________________________________________________________________________________________
block8_10_conv (Conv2D) (None, 5, 5, 2080) 933920 block8_10_mixed[0][0]
__________________________________________________________________________________________________
block8_10 (Lambda) (None, 5, 5, 2080) 0 block8_9_ac[0][0]
block8_10_conv[0][0]
__________________________________________________________________________________________________
conv_7b (Conv2D) (None, 5, 5, 1536) 3194880 block8_10[0][0]
__________________________________________________________________________________________________
conv_7b_bn (BatchNormalization) (None, 5, 5, 1536) 4608 conv_7b[0][0]
__________________________________________________________________________________________________
conv_7b_ac (Activation) (None, 5, 5, 1536) 0 conv_7b_bn[0][0]
__________________________________________________________________________________________________
flatten (Flatten) (None, 38400) 0 conv_7b_ac[0][0]
__________________________________________________________________________________________________
dropout_11 (Dropout) (None, 38400) 0 flatten[0][0]
__________________________________________________________________________________________________
dense_11 (Dense) (None, 5) 192005 dropout_11[0][0]
==================================================================================================
Total params: 54,528,741
Trainable params: 54,468,197
Non-trainable params: 60,544
__________________________________________________________________________________________________
history= model_inceptionNet.fit_generator(generator=train_generator,
steps_per_epoch=1000,
validation_data=valid_generator,
validation_steps=1000,
epochs=10)
Epoch 1/10 91/91 [==============================] - 3s 28ms/step - loss: 0.4890 - accuracy: 0.8415 - val_loss: 0.1080 - val_accuracy: 0.9747 Epoch 2/10 91/91 [==============================] - 2s 21ms/step - loss: 0.0869 - accuracy: 0.9806 - val_loss: 0.0376 - val_accuracy: 0.9898 Epoch 3/10 91/91 [==============================] - 2s 21ms/step - loss: 0.0451 - accuracy: 0.9884 - val_loss: 0.0267 - val_accuracy: 0.9898 Epoch 4/10 91/91 [==============================] - 2s 22ms/step - loss: 0.0312 - accuracy: 0.9896 - val_loss: 0.0226 - val_accuracy: 0.9929 Epoch 5/10 91/91 [==============================] - 2s 22ms/step - loss: 0.0279 - accuracy: 0.9925 - val_loss: 0.0189 - val_accuracy: 0.9947 Epoch 6/10 91/91 [==============================] - 2s 23ms/step - loss: 0.0177 - accuracy: 0.9955 - val_loss: 0.0144 - val_accuracy: 0.9965 Epoch 7/10 91/91 [==============================] - 2s 23ms/step - loss: 0.0142 - accuracy: 0.9965 - val_loss: 0.0119 - val_accuracy: 0.9973 Epoch 8/10 91/91 [==============================] - 3s 31ms/step - loss: 0.0130 - accuracy: 0.9972 - val_loss: 0.0154 - val_accuracy: 0.9973 Epoch 9/10 91/91 [==============================] - 2s 27ms/step - loss: 0.0094 - accuracy: 0.9974 - val_loss: 0.0110 - val_accuracy: 0.9965 Epoch 10/10 91/91 [==============================] - 2s 26ms/step - loss: 0.0088 - accuracy: 0.9980 - val_loss: 0.0085 - val_accuracy: 0.9982
accr = model_inceptionNet.evaluate(X_test,y_test)
151/151 [==============================] - 1s 5ms/step - loss: 0.0192 - accuracy: 0.9944
y_pred = model_inceptionNet.predict(X_test)
y_pred=y_pred.argmax(axis=1)
acc4=accr[1]
print('Test set\n Accuracy: {:0.5f}'.format(accr[1]))
Test set Accuracy: 0.99441
print('\n')
print("Precision, Recall, F1")
print('\n')
labels =['No DR', 'Moderate', 'Mild', 'Proliferative DR', 'Severe']
CR=classification_report(y_test, y_pred, target_names=labels)
print(CR)
print('\n')
Precision, Recall, F1
precision recall f1-score support
No DR 1.00 0.98 0.99 297
Moderate 0.99 1.00 0.99 694
Mild 1.00 0.99 0.99 1664
Proliferative DR 1.00 0.99 1.00 960
Severe 0.99 1.00 1.00 1215
accuracy 0.99 4830
macro avg 0.99 0.99 0.99 4830
weighted avg 0.99 0.99 0.99 4830
CM=confusion_matrix(y_test, y_pred)
fig, ax = plot_confusion_matrix(conf_mat=CM,figsize=(10, 10),
show_absolute=True,
show_normed=True,
colorbar=False)
ax.set_xticklabels([''] + labels)
ax.set_yticklabels([''] + labels)
plt.show()
x = PrettyTable()
print('\n')
print("Comparison of all models results")
x.field_names = ["Model", "Accuracy"]
x.add_row(["Resnet Model", round(acc1,5)])
x.add_row(["DenseNet Model", round(acc2,5)])
x.add_row(["Exception Model", round(acc3,5)])
x.add_row(["InceptionResNet Model", round(acc4,5)])
print(x)
print('\n')
Comparison of all models results +-----------------------+----------+ | Model | Accuracy | +-----------------------+----------+ | Resnet Model | 0.99669 | | DenseNet Model | 0.99358 | | Exception Model | 0.99627 | | InceptionResNet Model | 0.99441 | +-----------------------+----------+